comfyui-node 1.5.0 → 1.6.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/dist/.tsbuildinfo +1 -1
- package/dist/call-wrapper.d.ts.map +1 -1
- package/dist/call-wrapper.js +133 -37
- package/dist/call-wrapper.js.map +1 -1
- package/dist/index.d.ts +3 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/index.js.map +1 -1
- package/dist/multipool/client-registry.d.ts +12 -0
- package/dist/multipool/client-registry.d.ts.map +1 -0
- package/dist/multipool/client-registry.js +26 -0
- package/dist/multipool/client-registry.js.map +1 -0
- package/dist/multipool/index.d.ts +2 -0
- package/dist/multipool/index.d.ts.map +1 -0
- package/dist/multipool/index.js +2 -0
- package/dist/multipool/index.js.map +1 -0
- package/dist/multipool/interfaces.d.ts +5 -0
- package/dist/multipool/interfaces.d.ts.map +1 -0
- package/dist/multipool/interfaces.js +2 -0
- package/dist/multipool/interfaces.js.map +1 -0
- package/dist/multipool/job-queue-processor.d.ts +12 -0
- package/dist/multipool/job-queue-processor.d.ts.map +1 -0
- package/dist/multipool/job-queue-processor.js +10 -0
- package/dist/multipool/job-queue-processor.js.map +1 -0
- package/dist/multipool/multi-workflow-pool.d.ts +24 -0
- package/dist/multipool/multi-workflow-pool.d.ts.map +1 -0
- package/dist/multipool/multi-workflow-pool.js +73 -0
- package/dist/multipool/multi-workflow-pool.js.map +1 -0
- package/dist/multipool/pool-event-manager.d.ts +11 -0
- package/dist/multipool/pool-event-manager.d.ts.map +1 -0
- package/dist/multipool/pool-event-manager.js +28 -0
- package/dist/multipool/pool-event-manager.js.map +1 -0
- package/dist/multipool/tests/job-state-registry.d.ts +17 -0
- package/dist/multipool/tests/job-state-registry.d.ts.map +1 -0
- package/dist/multipool/tests/job-state-registry.js +24 -0
- package/dist/multipool/tests/job-state-registry.js.map +1 -0
- package/dist/multipool/tests/multipool-basic.d.ts +2 -0
- package/dist/multipool/tests/multipool-basic.d.ts.map +1 -0
- package/dist/multipool/tests/multipool-basic.js +4 -0
- package/dist/multipool/tests/multipool-basic.js.map +1 -0
- package/dist/multipool/workflow.d.ts +174 -0
- package/dist/multipool/workflow.d.ts.map +1 -0
- package/dist/multipool/workflow.js +272 -0
- package/dist/multipool/workflow.js.map +1 -0
- package/dist/pool/SmartPool.d.ts +144 -0
- package/dist/pool/SmartPool.d.ts.map +1 -0
- package/dist/pool/SmartPool.js +677 -0
- package/dist/pool/SmartPool.js.map +1 -0
- package/dist/pool/SmartPoolV2.d.ts +120 -0
- package/dist/pool/SmartPoolV2.d.ts.map +1 -0
- package/dist/pool/SmartPoolV2.js +587 -0
- package/dist/pool/SmartPoolV2.js.map +1 -0
- package/dist/pool/WorkflowPool.d.ts +23 -0
- package/dist/pool/WorkflowPool.d.ts.map +1 -1
- package/dist/pool/WorkflowPool.js +206 -59
- package/dist/pool/WorkflowPool.js.map +1 -1
- package/dist/pool/client/ClientManager.d.ts +4 -2
- package/dist/pool/client/ClientManager.d.ts.map +1 -1
- package/dist/pool/client/ClientManager.js +29 -9
- package/dist/pool/client/ClientManager.js.map +1 -1
- package/dist/pool/index.d.ts +2 -0
- package/dist/pool/index.d.ts.map +1 -1
- package/dist/pool/index.js +2 -0
- package/dist/pool/index.js.map +1 -1
- package/dist/pool/queue/QueueAdapter.d.ts +2 -0
- package/dist/pool/queue/QueueAdapter.d.ts.map +1 -1
- package/dist/pool/queue/adapters/memory.d.ts +2 -0
- package/dist/pool/queue/adapters/memory.d.ts.map +1 -1
- package/dist/pool/queue/adapters/memory.js +14 -2
- package/dist/pool/queue/adapters/memory.js.map +1 -1
- package/dist/pool/types/affinity.d.ts +6 -0
- package/dist/pool/types/affinity.d.ts.map +1 -0
- package/dist/pool/types/affinity.js +2 -0
- package/dist/pool/types/affinity.js.map +1 -0
- package/dist/pool/types/job.d.ts.map +1 -1
- package/package.json +2 -1
package/dist/multipool/multi-workflow-pool.js
@@ -0,0 +1,73 @@
import { ClientRegistry } from "src/multipool/client-registry.js";
import { PoolEventManager } from "src/multipool/pool-event-manager.js";
import { JobStateRegistry } from "src/multipool/tests/job-state-registry.js";
import { JobQueueProcessor } from "src/multipool/job-queue-processor.js";
/**
 * MultiWorkflowPool class to manage heterogeneous clusters of ComfyUI workers with different workflow capabilities.
 * Using a fully event driven architecture to handle client connections, job submissions, and failover strategies.
 * Zero polling is used; all operations are event driven. Maximizes responsiveness and scalability.
 */
export class MultiWorkflowPool {
    // Event manager for handling pool events
    events;
    // Registry for managing clients in the pool
    clientRegistry;
    // Registry for managing job state
    jobRegistry;
    // Multi queue map, one per workflow based on the workflow hash
    queues = new Map();
    constructor() {
        this.events = new PoolEventManager(this);
        this.clientRegistry = new ClientRegistry(this);
        this.jobRegistry = new JobStateRegistry(this);
    }
    // PUBLIC API
    async init() {
    }
    async shutdown() {
    }
    addClient(clientUrl) {
        this.clientRegistry.addClient(clientUrl);
    }
    removeClient(clientUrl) {
        this.clientRegistry.removeClient(clientUrl);
    }
    async submitJob(workflow) {
        let workflowHash = workflow.structureHash;
        if (!workflowHash) {
            workflow.updateHash();
            workflowHash = workflow.structureHash;
        }
        const queue = this.assertQueue(workflowHash);
        if (!queue) {
            throw new Error("Failed to create or retrieve job queue for workflow.");
        }
        const newJobId = this.jobRegistry.addJob(workflow);
        queue.enqueueJob(newJobId, workflow);
        return newJobId;
    }
    getJobStatus(jobId) {
        return this.jobRegistry.getJobStatus(jobId);
    }
    async cancelJob(jobId) {
        this.jobRegistry.cancelJob(jobId);
    }
    attachEventHook(event, listener) {
        if (event && listener) {
            this.events.attachHook(event, listener);
        }
    }
    // PRIVATE METHODS
    assertQueue(workflowHash) {
        if (!workflowHash) {
            return null;
        }
        let queue = this.queues.get(workflowHash);
        if (!queue) {
            queue = new JobQueueProcessor(this);
            this.queues.set(workflowHash, queue);
        }
        return queue;
    }
}
//# sourceMappingURL=multi-workflow-pool.js.map

package/dist/multipool/multi-workflow-pool.js.map
@@ -0,0 +1 @@
{"version":3,"file":"multi-workflow-pool.js","sourceRoot":"","sources":["../../src/multipool/multi-workflow-pool.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,kCAAkC,CAAC;AAClE,OAAO,EAAE,gBAAgB,EAAE,MAAM,qCAAqC,CAAC;AACvE,OAAO,EAAE,gBAAgB,EAAE,MAAM,2CAA2C,CAAC;AAC7E,OAAO,EAAE,iBAAiB,EAAE,MAAM,sCAAsC,CAAC;AAIzE;;;;GAIG;AACH,MAAM,OAAO,iBAAiB;IAE5B,yCAAyC;IACjC,MAAM,CAAmB;IAEjC,4CAA4C;IACpC,cAAc,CAAiB;IAEvC,kCAAkC;IAC1B,WAAW,CAAmB;IAEtC,+DAA+D;IACvD,MAAM,GAAmC,IAAI,GAAG,EAAE,CAAC;IAE3D;QACE,IAAI,CAAC,MAAM,GAAG,IAAI,gBAAgB,CAAC,IAAI,CAAC,CAAC;QACzC,IAAI,CAAC,cAAc,GAAG,IAAI,cAAc,CAAC,IAAI,CAAC,CAAC;QAC/C,IAAI,CAAC,WAAW,GAAG,IAAI,gBAAgB,CAAC,IAAI,CAAC,CAAC;IAChD,CAAC;IAED,aAAa;IACb,KAAK,CAAC,IAAI;IACV,CAAC;IAED,KAAK,CAAC,QAAQ;IACd,CAAC;IAED,SAAS,CAAC,SAAiB;QACzB,IAAI,CAAC,cAAc,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC;IAC3C,CAAC;IAED,YAAY,CAAC,SAAiB;QAC5B,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC;IAC9C,CAAC;IAED,KAAK,CAAC,SAAS,CAAC,QAAkB;QAChC,IAAI,YAAY,GAAG,QAAQ,CAAC,aAAa,CAAC;QAC1C,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,QAAQ,CAAC,UAAU,EAAE,CAAC;YACtB,YAAY,GAAG,QAAQ,CAAC,aAAa,CAAC;QACxC,CAAC;QAED,MAAM,KAAK,GAAG,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,CAAC;QAC7C,IAAI,CAAC,KAAK,EAAE,CAAC;YACX,MAAM,IAAI,KAAK,CAAC,sDAAsD,CAAC,CAAC;QAC1E,CAAC;QAED,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;QACnD,KAAK,CAAC,UAAU,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;QACrC,OAAO,QAAQ,CAAC;IAClB,CAAC;IAED,YAAY,CAAC,KAAa;QACxB,OAAO,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC;IAC9C,CAAC;IAED,KAAK,CAAC,SAAS,CAAC,KAAa;QAC3B,IAAI,CAAC,WAAW,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC;IACpC,CAAC;IAED,eAAe,CAAC,KAAa,EAAE,QAAgC;QAC7D,IAAI,KAAK,IAAI,QAAQ,EAAE,CAAC;YACtB,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;QAC1C,CAAC;IACH,CAAC;IAED,kBAAkB;IACV,WAAW,CAAC,YAAgC;QAClD,IAAI,CAAC,YAAY,EAAE,CAAC;YAClB,OAAO,IAAI,CAAC;QACd,CAAC;QACD,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,YAAY,CAAC,CAAC;QAC1C,IAAI,CAAC,KAAK,EAAE,CAAC;YACX,KAAK,GAAG,IAAI,iBAAiB,CAAC,IAAI,CAAC,CAAC;YACpC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,YAAY,EAAE,KAAK,CAAC,CAAC;QACvC,CAAC;QACD,OAAO,KAAK,CAAC;IACf,CAAC;CACF"}
package/dist/multipool/pool-event-manager.d.ts
@@ -0,0 +1,11 @@
import { MultiWorkflowPool } from "src/multipool/multi-workflow-pool.js";
import { PoolEvent } from "src/multipool/interfaces.js";
export declare class PoolEventManager {
    pool: MultiWorkflowPool;
    hooks: Map<string, Array<Function>>;
    constructor(pool: MultiWorkflowPool);
    attachHook(event: string, listener: (e: PoolEvent) => void): void;
    emitEvent(event: PoolEvent): void;
    detachHook(event: string, listener: (e: PoolEvent) => void): void;
}
//# sourceMappingURL=pool-event-manager.d.ts.map

package/dist/multipool/pool-event-manager.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"pool-event-manager.d.ts","sourceRoot":"","sources":["../../src/multipool/pool-event-manager.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,sCAAsC,CAAC;AACzE,OAAO,EAAE,SAAS,EAAE,MAAM,6BAA6B,CAAC;AAExD,qBAAa,gBAAgB;IAE3B,IAAI,EAAE,iBAAiB,CAAC;IAExB,KAAK,EAAE,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAa;gBAEpC,IAAI,EAAE,iBAAiB;IAInC,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAC,EAAE,SAAS,KAAK,IAAI;IAO1D,SAAS,CAAC,KAAK,EAAE,SAAS;IAS1B,UAAU,CAAC,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,CAAC,CAAC,EAAE,SAAS,KAAK,IAAI;CAM3D"}
package/dist/multipool/pool-event-manager.js
@@ -0,0 +1,28 @@
export class PoolEventManager {
    pool;
    hooks = new Map();
    constructor(pool) {
        this.pool = pool;
    }
    attachHook(event, listener) {
        if (!this.hooks.has(event)) {
            this.hooks.set(event, []);
        }
        this.hooks.get(event).push(listener);
    }
    emitEvent(event) {
        const listeners = this.hooks.get(event.type);
        if (listeners) {
            for (const listener of listeners) {
                listener(event);
            }
        }
    }
    detachHook(event, listener) {
        const listeners = this.hooks.get(event);
        if (listeners) {
            this.hooks.set(event, listeners.filter(l => l !== listener));
        }
    }
}
//# sourceMappingURL=pool-event-manager.js.map

package/dist/multipool/pool-event-manager.js.map
@@ -0,0 +1 @@
{"version":3,"file":"pool-event-manager.js","sourceRoot":"","sources":["../../src/multipool/pool-event-manager.ts"],"names":[],"mappings":"AAGA,MAAM,OAAO,gBAAgB;IAE3B,IAAI,CAAoB;IAExB,KAAK,GAAiC,IAAI,GAAG,EAAE,CAAC;IAEhD,YAAY,IAAuB;QACjC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,CAAC;IAED,UAAU,CAAC,KAAa,EAAE,QAAgC;QACxD,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC;YAC3B,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;QAC5B,CAAC;QACD,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAE,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;IACxC,CAAC;IAED,SAAS,CAAC,KAAgB;QACxB,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAC7C,IAAI,SAAS,EAAE,CAAC;YACd,KAAK,MAAM,QAAQ,IAAI,SAAS,EAAE,CAAC;gBACjC,QAAQ,CAAC,KAAK,CAAC,CAAC;YAClB,CAAC;QACH,CAAC;IACH,CAAC;IAED,UAAU,CAAC,KAAa,EAAE,QAAgC;QACxD,MAAM,SAAS,GAAG,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;QACxC,IAAI,SAAS,EAAE,CAAC;YACd,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,SAAS,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,KAAK,QAAQ,CAAC,CAAC,CAAC;QAC/D,CAAC;IACH,CAAC;CACF"}
package/dist/multipool/tests/job-state-registry.d.ts
@@ -0,0 +1,17 @@
import { MultiWorkflowPool } from "src/multipool/multi-workflow-pool.js";
import { Workflow } from "src/multipool/workflow.js";
interface JobState {
    jobId: string;
    workflow: Workflow;
    status: 'pending' | 'running' | 'completed' | 'failed' | 'canceled';
}
export declare class JobStateRegistry {
    pool: MultiWorkflowPool;
    jobs: Map<string, JobState>;
    constructor(pool: MultiWorkflowPool);
    addJob(workflow: Workflow): `${string}-${string}-${string}-${string}-${string}`;
    getJobStatus(jobId: string): void;
    cancelJob(jobId: string): void;
}
export {};
//# sourceMappingURL=job-state-registry.d.ts.map

package/dist/multipool/tests/job-state-registry.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"job-state-registry.d.ts","sourceRoot":"","sources":["../../../src/multipool/tests/job-state-registry.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,sCAAsC,CAAC;AACzE,OAAO,EAAE,QAAQ,EAAE,MAAM,2BAA2B,CAAC;AAGrD,UAAU,QAAQ;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,QAAQ,CAAC;IACnB,MAAM,EAAE,SAAS,GAAG,SAAS,GAAG,WAAW,GAAG,QAAQ,GAAG,UAAU,CAAC;CACrE;AAED,qBAAa,gBAAgB;IAE3B,IAAI,EAAE,iBAAiB,CAAC;IAExB,IAAI,EAAE,GAAG,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAa;gBAE5B,IAAI,EAAE,iBAAiB;IAInC,MAAM,CAAC,QAAQ,EAAE,QAAQ;IAYzB,YAAY,CAAC,KAAK,EAAE,MAAM;IAI1B,SAAS,CAAC,KAAK,EAAE,MAAM;CAGxB"}
package/dist/multipool/tests/job-state-registry.js
@@ -0,0 +1,24 @@
import { randomUUID } from "node:crypto";
export class JobStateRegistry {
    pool;
    jobs = new Map();
    constructor(pool) {
        this.pool = pool;
    }
    addJob(workflow) {
        // Create new job id
        const jobId = randomUUID();
        const jobState = {
            jobId,
            workflow,
            status: 'pending',
        };
        this.jobs.set(jobId, jobState);
        return jobId;
    }
    getJobStatus(jobId) {
    }
    cancelJob(jobId) {
    }
}
//# sourceMappingURL=job-state-registry.js.map

package/dist/multipool/tests/job-state-registry.js.map
@@ -0,0 +1 @@
{"version":3,"file":"job-state-registry.js","sourceRoot":"","sources":["../../../src/multipool/tests/job-state-registry.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAQzC,MAAM,OAAO,gBAAgB;IAE3B,IAAI,CAAoB;IAExB,IAAI,GAA0B,IAAI,GAAG,EAAE,CAAC;IAExC,YAAY,IAAuB;QACjC,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC;IACnB,CAAC;IAED,MAAM,CAAC,QAAkB;QACvB,oBAAoB;QACpB,MAAM,KAAK,GAAG,UAAU,EAAE,CAAC;QAC3B,MAAM,QAAQ,GAAa;YACzB,KAAK;YACL,QAAQ;YACR,MAAM,EAAE,SAAS;SAClB,CAAC;QACF,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,KAAK,EAAE,QAAQ,CAAC,CAAC;QAC/B,OAAO,KAAK,CAAC;IACf,CAAC;IAED,YAAY,CAAC,KAAa;IAE1B,CAAC;IAED,SAAS,CAAC,KAAa;IAEvB,CAAC;CACF"}
package/dist/multipool/tests/multipool-basic.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"multipool-basic.d.ts","sourceRoot":"","sources":["../../../src/multipool/tests/multipool-basic.ts"],"names":[],"mappings":""}
package/dist/multipool/tests/multipool-basic.js.map
@@ -0,0 +1 @@
{"version":3,"file":"multipool-basic.js","sourceRoot":"","sources":["../../../src/multipool/tests/multipool-basic.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,sCAAsC,CAAC;AAEzE,MAAM,IAAI,GAAG,IAAI,iBAAiB,EAAE,CAAC;AAErC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC"}
package/dist/multipool/workflow.d.ts
@@ -0,0 +1,174 @@
import type { AugmentNodes } from '../node-type-hints.js';
type WorkflowJSON = Record<string, any>;
export interface WorkflowResultMeta {
    _promptId?: string;
    _nodes?: string[];
    _aliases?: Record<string, string>;
    _autoSeeds?: Record<string, number>;
}
export type WorkflowResult = WorkflowResultMeta & Record<string, any>;
export interface WorkflowRunOptions {
    includeOutputs?: string[];
}
export interface WorkflowJobEvents<R extends WorkflowResult = WorkflowResult> {
    progress: (info: {
        value: number;
        max: number;
        prompt_id: string;
        node: string;
    }) => void;
    progress_pct: (pct: number, info: any) => void;
    preview: (blob: Blob) => void;
    preview_meta: (data: {
        blob: Blob;
        metadata: any;
    }) => void;
    pending: (promptId: string) => void;
    start: (promptId: string) => void;
    output: (key: string, data: any) => void;
    finished: (data: R, promptId: string) => void;
    failed: (err: Error, promptId?: string) => void;
}
type EventKey<R extends WorkflowResult> = keyof WorkflowJobEvents<R>;
export declare class WorkflowJob<R extends WorkflowResult = WorkflowResult> {
    private emitter;
    private donePromise;
    private doneResolve;
    private doneReject;
    lastProgressPct: number;
    constructor();
    on<K extends EventKey<R>>(evt: K, fn: WorkflowJobEvents<R>[K]): this;
    off<K extends EventKey<R>>(evt: K, fn: WorkflowJobEvents<R>[K]): this;
    /** Await final mapped outputs */
    done(): Promise<R>;
    _emit<K extends EventKey<R>>(evt: K, ...args: Parameters<WorkflowJobEvents<R>[K]>): void;
    _finish(data: R): void;
    _fail(err: Error, promptId?: string): void;
}
type NodeInputs<T> = T extends {
    inputs: infer I;
} ? I : never;
type OutputMap = Record<string, any>;
type OutputShapeFor<C extends string> = C extends 'SaveImage' | 'SaveImageAdvanced' ? {
    images?: any[];
} : C extends 'KSampler' ? {
    samples?: any;
} : any;
type NodeOutputFor<T extends WorkflowJSON, K extends keyof T & string> = T[K] extends {
    class_type: infer C;
} ? C extends string ? OutputShapeFor<C> : any : any;
export declare class Workflow<T extends WorkflowJSON = WorkflowJSON, O extends OutputMap = {}> {
    private json;
    private outputNodeIds;
    private outputAliases;
    private bypassedNodes;
    private _pendingImageInputs;
    private _pendingFolderFiles;
    /** Structural hash of the workflow JSON for compatibility tracking in failover scenarios */
    structureHash?: string;
    static from<TD extends WorkflowJSON>(data: TD, opts?: {
        autoHash?: boolean;
    }): Workflow<TD, {}>;
    static from(data: string, opts?: {
        autoHash?: boolean;
    }): Workflow;
    constructor(json: T, opts?: {
        autoHash?: boolean;
    });
    /**
     * Like from(), but augments known node types (e.g., KSampler) with soft union hints
     * for inputs such as sampler_name & scheduler while still allowing arbitrary strings.
     */
    static fromAugmented<TD extends WorkflowJSON>(data: TD, opts?: {
        autoHash?: boolean;
    }): Workflow<AugmentNodes<TD>, {}>;
    /** Set a nested input path on a node e.g. set('9.inputs.text','hello') */
    set(path: string, value: any): this;
    /** Attach a single image buffer to a node input (e.g., LoadImage.image). Will upload on run() then set the input to the filename. */
    attachImage(nodeId: keyof T & string, inputName: string, data: Blob | Buffer | ArrayBuffer | Uint8Array, fileName: string, opts?: {
        subfolder?: string;
        override?: boolean;
    }): this;
    /** Attach multiple files into a server subfolder (useful for LoadImageSetFromFolderNode). */
    attachFolderFiles(subfolder: string, files: Array<{
        data: Blob | Buffer | ArrayBuffer | Uint8Array;
        fileName: string;
    }>, opts?: {
        override?: boolean;
    }): this;
    /**
     * Sugar for setting a node's input: wf.input('SAMPLER','steps',30)
     * Equivalent to set('SAMPLER.inputs.steps', 30).
     * Performs a light existence check to aid DX (doesn't throw if missing by design unless strict parameter is passed).
     */
    input<K extends keyof T, P extends keyof NodeInputs<T[K]> & string>(nodeId: K, inputName: P, value: NodeInputs<T[K]>[P], opts?: {
        strict?: boolean;
    }): this;
    /**
     * Batch variant:
     * - wf.inputs('SAMPLER', { steps: 30, cfg: 7 })
     * - wf.inputs({ SAMPLER: { steps: 30 }, CLIP: { text: 'hello' } })
     * Honors strict mode (throws if node missing when strict:true).
     */
    batchInputs<K extends keyof T>(nodeId: K, values: Partial<NodeInputs<T[K]>>, opts?: {
        strict?: boolean;
    }): this;
    batchInputs<M extends {
        [N in keyof T]?: Partial<NodeInputs<T[N]>>;
    }>(batch: M, opts?: {
        strict?: boolean;
    }): this;
    /**
     * Mark a node to be bypassed during execution.
     * The node will be removed and its connections automatically rewired.
     *
     * @param node - Node ID to bypass
     * @returns This workflow instance for chaining
     */
    bypass(node: keyof T & string): this;
    /**
     * Mark multiple nodes to be bypassed during execution.
     *
     * @param nodes - Array of node IDs to bypass
     * @returns This workflow instance for chaining
     */
    bypass(nodes: (keyof T & string)[]): this;
    /**
     * Remove a node from the bypass list, re-enabling it.
     *
     * @param node - Node ID to reinstate
     * @returns This workflow instance for chaining
     */
    reinstate(node: keyof T & string): this;
    /**
     * Remove multiple nodes from the bypass list.
     *
     * @param nodes - Array of node IDs to reinstate
     * @returns This workflow instance for chaining
     */
    reinstate(nodes: (keyof T & string)[]): this;
    /**
     * Update the structural hash after making non-dynamic changes to the workflow.
     * Call this if you modify the workflow structure after initialization and the autoHash was disabled,
     * or if you want to recalculate the hash after making structural changes.
     *
     * Example:
     * ```
     * const wf = Workflow.from(data, { autoHash: false });
     * wf.input('SAMPLER', 'ckpt_name', 'model_v1.safetensors');
     * wf.updateHash(); // Recompute hash after structural change
     * ```
     */
    updateHash(): this;
    /** IDE helper returning empty object typed as final result (aliases + metadata). */
    typedResult(): WorkflowResult & O;
}
export interface Workflow<T extends WorkflowJSON = WorkflowJSON, O extends OutputMap = {}> {
    output<NodeId extends keyof T & string>(nodeId: NodeId): Workflow<T, O & Record<NodeId, NodeOutputFor<T, NodeId>>>;
    output<Spec extends `${string}:${keyof T & string}`>(spec: Spec): Workflow<T, O & (Spec extends `${infer Alias}:${infer Node}` ? (Node extends keyof T & string ? Record<Alias, NodeOutputFor<T, Node>> : Record<Alias, any>) : {})>;
    output<Alias extends string, NodeId extends keyof T & string>(alias: Alias, nodeId: NodeId): Workflow<T, O & Record<Alias, NodeOutputFor<T, NodeId>>>;
    output<A extends string>(single: A): Workflow<T, O & Record<A, any>>;
    output<Alias extends string, NodeId extends string>(alias: Alias, nodeId: NodeId): Workflow<T, O & Record<Alias, any>>;
}
export {};
//# sourceMappingURL=workflow.d.ts.map

package/dist/multipool/workflow.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"workflow.d.ts","sourceRoot":"","sources":["../../src/multipool/workflow.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAE1D,KAAK,YAAY,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAExC,MAAM,WAAW,kBAAkB;IAC/B,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAClC,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;CACvC;AACD,MAAM,MAAM,cAAc,GAAG,kBAAkB,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAEtE,MAAM,WAAW,kBAAkB;IAC/B,cAAc,CAAC,EAAE,MAAM,EAAE,CAAC;CAC7B;AAED,MAAM,WAAW,iBAAiB,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;IACxE,QAAQ,EAAE,CAAC,IAAI,EAAE;QACb,KAAK,EAAE,MAAM,CAAC;QACd,GAAG,EAAE,MAAM,CAAC;QACZ,SAAS,EAAE,MAAM,CAAC;QAClB,IAAI,EAAE,MAAM,CAAC;KAChB,KAAK,IAAI,CAAC;IACX,YAAY,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IAC/C,OAAO,EAAE,CAAC,IAAI,EAAE,IAAI,KAAK,IAAI,CAAC;IAC9B,YAAY,EAAE,CAAC,IAAI,EAAE;QAAE,IAAI,EAAE,IAAI,CAAC;QAAC,QAAQ,EAAE,GAAG,CAAA;KAAE,KAAK,IAAI,CAAC;IAC5D,OAAO,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IACpC,KAAK,EAAE,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAClC,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,IAAI,CAAC;IACzC,QAAQ,EAAE,CAAC,IAAI,EAAE,CAAC,EAAE,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;IAC9C,MAAM,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,QAAQ,CAAC,EAAE,MAAM,KAAK,IAAI,CAAC;CACnD;AAED,KAAK,QAAQ,CAAC,CAAC,SAAS,cAAc,IAAI,MAAM,iBAAiB,CAAC,CAAC,CAAC,CAAC;AAcrE,qBAAa,WAAW,CAAC,CAAC,SAAS,cAAc,GAAG,cAAc;IAC9D,OAAO,CAAC,OAAO,CAAqB;IACpC,OAAO,CAAC,WAAW,CAAa;IAChC,OAAO,CAAC,WAAW,CAAkB;IACrC,OAAO,CAAC,UAAU,CAAoB;IAC/B,eAAe,EAAE,MAAM,CAAM;;IAOpC,EAAE,CAAC,CAAC,SAAS,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE,EAAE,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC7D,GAAG,CAAC,CAAC,SAAS,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,EAAE,EAAE,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IAC9D,iCAAiC;IACjC,IAAI;IACJ,KAAK,CAAC,CAAC,SAAS,QAAQ,CAAC,CAAC,CAAC,EAAE,GAAG,EAAE,CAAC,EAAE,GAAG,IAAI,EAAE,UAAU,CAAC,iBAAiB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;IACjF,OAAO,CAAC,IAAI,EAAE,CAAC;IACf,KAAK,CAAC,GAAG,EAAE,KAAK,EAAE,QAAQ,CAAC,EAAE,MAAM;CACtC;AAGD,KAAK,UAAU,CAAC,CAAC,IAAI,CAAC,SAAS;IAAE,MAAM,EAAE,MAAM,CAAC,CAAA;CAAE,GAAG,CAAC,GAAG,KAAK,CAAC;AAG/D,KAAK,SAAS,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;AAIrC,KAAK,cAAc,CAAC,CAAC,SAAS,MAAM,IAChC,CAAC,SAAS,WAAW,GAAG,mBAAmB,GAAG;IAAE,MAAM,CAAC,EAAE,GAAG,EAAE,CAAA;CAAE,GAChE,CAAC,SAAS,UAAU,GAAG;IAAE,OAAO,CAAC,EAAE,GAAG,CAAA;CAAE,GACxC,GAAG,CAAC;AAER,KAAK,aAAa,CAAC,CAAC,SAAS,YAAY,EAAE,CAAC,SAAS,MAAM,CAAC,GAAG,MAAM,IACjE,CAAC,CAAC,CAAC,CAAC,SAAS;IAAE,UAAU,EAAE,MAAM,CAAC,CAAA;CAAE,GAClC,CAAC,SAAS,MAAM,GAChB,cAAc,CAAC,CAAC,CAAC,GACjB,GAAG,GACH,GAAG,CAAC;AAEV,qBAAa,QAAQ,CAAC,CAAC,SAAS,YAAY,GAAG,YAAY,EAAE,CAAC,SAAS,SAAS,GAAG,EAAE;IACjF,OAAO,CAAC,IAAI,CAAI;IAChB,OAAO,CAAC,aAAa,CAAgB;IACrC,OAAO,CAAC,aAAa,CAA8B;IACnD,OAAO,CAAC,aAAa,CAAmB;IAExC,OAAO,CAAC,mBAAmB,CAA0H;IACrJ,OAAO,CAAC,mBAAmB,CAAsF;IAEjH,4FAA4F;IAC5F,aAAa,CAAC,EAAE,MAAM,CAAC;IAGvB,MAAM,CAAC,IAAI,CAAC,EAAE,SAAS,YAAY,EAAE,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,EAAE,EAAE,EAAE,CAAC;IAC/F,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ;gBAatD,IAAI,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAE;IAQlD;;;OAGG;IACH,MAAM,CAAC,aAAa,CAAC,EAAE,SAAS,YAAY,EAAE,IAAI,EAAE,EAAE,EAAE,IAAI,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,QAAQ,CAAC,YAAY,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;IAItH,0EAA0E;IAC1E,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,GAAG;IAW5B,qIAAqI;IACrI,WAAW,CAAC,MAAM,EAAE,MAAM,CAAC,GAAG,MAAM,EAAE,SAAS,EAAE,
MAAM,EAAE,IAAI,EAAE,IAAI,GAAG,MAAM,GAAG,WAAW,GAAG,UAAU,EAAE,QAAQ,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,MAAM,CAAC;QAAC,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAE;IAM5K,6FAA6F;IAC7F,iBAAiB,CAAC,SAAS,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,CAAC;QAAE,IAAI,EAAE,IAAI,GAAG,MAAM,GAAG,WAAW,GAAG,UAAU,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAA;KAAE,CAAC,EAAE,IAAI,CAAC,EAAE;QAAE,QAAQ,CAAC,EAAE,OAAO,CAAA;KAAE;IAQtJ;;;;OAIG;IACH,KAAK,CAAC,CAAC,SAAS,MAAM,CAAC,EAAE,CAAC,SAAS,MAAM,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,CAAC,EAAE,SAAS,EAAE,CAAC,EAAE,KAAK,EAAE,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAE;IAiBpJ;;;;;OAKG;IACH,WAAW,CAAC,CAAC,SAAS,MAAM,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IAC/G,WAAW,CAAC,CAAC,SAAS;SAAG,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,EAAE,OAAO,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;KAAE,EAAE,KAAK,EAAE,CAAC,EAAE,IAAI,CAAC,EAAE;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAE,GAAG,IAAI;IA8DlH;;;;;;OAMG;IACH,MAAM,CAAC,IAAI,EAAE,MAAM,CAAC,GAAG,MAAM,GAAG,IAAI;IAEpC;;;;;OAKG;IACH,MAAM,CAAC,KAAK,EAAE,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,EAAE,GAAG,IAAI;IAczC;;;;;OAKG;IACH,SAAS,CAAC,IAAI,EAAE,MAAM,CAAC,GAAG,MAAM,GAAG,IAAI;IAEvC;;;;;OAKG;IACH,SAAS,CAAC,KAAK,EAAE,CAAC,MAAM,CAAC,GAAG,MAAM,CAAC,EAAE,GAAG,IAAI;IAe5C;;;;;;;;;;;OAWG;IACH,UAAU,IAAI,IAAI;IAKlB,oFAAoF;IACpF,WAAW,IAAI,cAAc,GAAG,CAAC;CACpC;AAGD,MAAM,WAAW,QAAQ,CAAC,CAAC,SAAS,YAAY,GAAG,YAAY,EAAE,CAAC,SAAS,SAAS,GAAG,EAAE;IAErF,MAAM,CAAC,MAAM,SAAS,MAAM,CAAC,GAAG,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,aAAa,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC;IAEnH,MAAM,CAAC,IAAI,SAAS,GAAG,MAAM,IAAI,MAAM,CAAC,GAAG,MAAM,EAAE,EAAE,IAAI,EAAE,IAAI,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,CAC9E,IAAI,SAAS,GAAG,MAAM,KAAK,IAAI,MAAM,IAAI,EAAE,GAAG,CAAC,IAAI,SAAS,MAAM,CAAC,GAAG,MAAM,GAAG,MAAM,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,GAAG,EAAE,CAClJ,CAAC,CAAC;IAEH,MAAM,CAAC,KAAK,SAAS,MAAM,EAAE,MAAM,SAAS,MAAM,CAAC,GAAG,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC,EAAE,MAAM,CAAC,CAAC,CAAC,CAAC;IAEtJ,MAAM,CAAC,CAAC,SAAS,MAAM,EAAE,MAAM,EAAE,CAAC,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC;IACrE,MAAM,CAAC,KAAK,SAAS,MAAM,EAAE,MAAM,SAAS,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,GAAG,QAAQ,CAAC,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,GAAG,CAAC,CAAC,CAAC;CAC1H"}
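Taken together, the declarations above describe a fluent, typed workflow builder: from()/fromAugmented() clone the prompt JSON and compute structureHash unless autoHash is disabled, input()/batchInputs() write node inputs with an optional strict existence check, output() accumulates (optionally aliased) output nodes into the result type, bypass()/reinstate() manage nodes to skip, and updateHash() recomputes the hash after edits. A usage sketch with an invented two-node prompt (the import path, node ids, class types and values are illustrative only):

import { Workflow } from "comfyui-node/dist/multipool/workflow.js"; // path assumed

// Hypothetical prompt in ComfyUI API format.
const prompt = {
    "3": { class_type: "KSampler", inputs: { steps: 20, cfg: 7, seed: 0 } },
    "9": { class_type: "SaveImage", inputs: { images: ["8", 0] } },
};

const wf = Workflow.from(prompt)
    .input("3", "steps", 30)                  // single typed input, same as set('3.inputs.steps', 30)
    .batchInputs("3", { cfg: 8, seed: 1234 }) // batch form; pass { strict: true } to throw on missing nodes
    .output("image:9")                        // alias "image" for node 9's outputs in the final result
    .bypass("3")                              // mark node 3 to be skipped and rewired at execution time
    .reinstate("3");                          // re-enable it again

wf.updateHash();                              // recompute structureHash after the edits above
const shape = wf.typedResult();               // {} at runtime; typed as WorkflowResult plus the "image" alias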
package/dist/multipool/workflow.js
@@ -0,0 +1,272 @@
import { hashWorkflow } from '../pool/utils/hash.js';
class TinyEmitter {
    listeners = new Map();
    on(evt, fn) {
        if (!this.listeners.has(evt))
            this.listeners.set(evt, new Set());
        this.listeners.get(evt).add(fn);
        return () => this.off(evt, fn);
    }
    off(evt, fn) { this.listeners.get(evt)?.delete(fn); }
    emit(evt, ...args) { this.listeners.get(evt)?.forEach(fn => { try {
        fn(...args);
    }
    catch { } }); }
    removeAll() { this.listeners.clear(); }
}
export class WorkflowJob {
    emitter = new TinyEmitter();
    donePromise;
    doneResolve;
    doneReject;
    lastProgressPct = -1;
    constructor() {
        this.donePromise = new Promise((res, rej) => { this.doneResolve = res; this.doneReject = rej; });
        // Prevent unhandled rejection warnings by attaching a catch handler
        // The actual error handling happens when user calls done()
        this.donePromise.catch(() => { });
    }
    on(evt, fn) { this.emitter.on(evt, fn); return this; }
    off(evt, fn) { this.emitter.off(evt, fn); return this; }
    /** Await final mapped outputs */
    done() { return this.donePromise; }
    _emit(evt, ...args) { this.emitter.emit(evt, ...args); }
    _finish(data) { this.doneResolve(data); this.emitter.emit('finished', data, data._promptId); }
    _fail(err, promptId) { this.doneReject(err); this.emitter.emit('failed', err, promptId); }
}
export class Workflow {
    json;
    outputNodeIds = [];
    outputAliases = {}; // nodeId -> alias
    bypassedNodes = []; // nodes to bypass during execution
    // Pending assets to upload before execution
    _pendingImageInputs = [];
    _pendingFolderFiles = [];
    /** Structural hash of the workflow JSON for compatibility tracking in failover scenarios */
    structureHash;
    static from(data, opts) {
        if (typeof data === 'string') {
            try {
                const parsed = JSON.parse(data);
                return new Workflow(parsed, opts);
            }
            catch (e) {
                throw new Error('Failed to parse workflow JSON string', { cause: e });
            }
        }
        return new Workflow(structuredClone(data), opts);
    }
    constructor(json, opts) {
        this.json = structuredClone(json);
        // Compute structural hash by default unless explicitly disabled
        if (opts?.autoHash !== false) {
            this.structureHash = hashWorkflow(this.json);
        }
    }
    /**
     * Like from(), but augments known node types (e.g., KSampler) with soft union hints
     * for inputs such as sampler_name & scheduler while still allowing arbitrary strings.
     */
    static fromAugmented(data, opts) {
        return Workflow.from(data, opts);
    }
    /** Set a nested input path on a node e.g. set('9.inputs.text','hello') */
    set(path, value) {
        const keys = path.split('.');
        let cur = this.json;
        for (let i = 0; i < keys.length - 1; i++) {
            if (cur[keys[i]] === undefined)
                cur[keys[i]] = {};
            cur = cur[keys[i]];
        }
        cur[keys[keys.length - 1]] = value;
        return this;
    }
    /** Attach a single image buffer to a node input (e.g., LoadImage.image). Will upload on run() then set the input to the filename. */
    attachImage(nodeId, inputName, data, fileName, opts) {
        const blob = toBlob(data, fileName);
        this._pendingImageInputs.push({ nodeId: String(nodeId), inputName, blob, fileName, subfolder: opts?.subfolder, override: opts?.override });
        return this;
    }
    /** Attach multiple files into a server subfolder (useful for LoadImageSetFromFolderNode). */
    attachFolderFiles(subfolder, files, opts) {
        for (const f of files) {
            const blob = toBlob(f.data, f.fileName);
            this._pendingFolderFiles.push({ subfolder, blob, fileName: f.fileName, override: opts?.override });
        }
        return this;
    }
    /**
     * Sugar for setting a node's input: wf.input('SAMPLER','steps',30)
     * Equivalent to set('SAMPLER.inputs.steps', 30).
     * Performs a light existence check to aid DX (doesn't throw if missing by design unless strict parameter is passed).
     */
    input(nodeId, inputName, value, opts) {
        const nodeKey = String(nodeId);
        const node = this.json[nodeKey];
        if (!node) {
            if (opts?.strict)
                throw new Error(`Workflow.input: node '${String(nodeId)}' not found`);
            // create minimal node shell if non-strict (lets users build up dynamically)
            this.json[nodeKey] = { inputs: { [inputName]: value } };
            return this;
        }
        if (!node.inputs) {
            if (opts?.strict)
                throw new Error(`Workflow.input: node '${String(nodeId)}' missing inputs object`);
            node.inputs = {};
        }
        node.inputs[inputName] = value;
        return this;
    }
    batchInputs(a, b, c) {
        // Form 1: (nodeId, values, opts)
        if (typeof a === 'string') {
            const nodeId = a;
            const values = b || {};
            const opts = c || {};
            for (const [k, v] of Object.entries(values)) {
                this.input(nodeId, k, v, opts);
            }
            return this;
        }
        // Form 2: (batchObject, opts)
        const batch = a || {};
        const opts = b || {};
        for (const [nodeId, values] of Object.entries(batch)) {
            if (!values)
                continue;
            for (const [k, v] of Object.entries(values)) {
                this.input(nodeId, k, v, opts);
            }
        }
        return this;
    }
    /**
     * Mark a node id whose outputs we want collected.
     * Supports aliasing in two forms:
     * - output('alias','9')
     * - output('alias:9')
     * - output('9') (no alias, raw node id key)
     */
    output(a, b) {
        let alias;
        let nodeId;
        if (b) {
            // Heuristic: if first arg looks like a node id and second arg looks like an alias, swap
            // Node ids are often numeric strings (e.g., '2'); aliases are non-numeric labels.
            const looksLikeNodeId = (s) => /^\d+$/.test(s) || this.json[s];
            if (looksLikeNodeId(String(a)) && !looksLikeNodeId(String(b))) {
                nodeId = String(a);
                alias = String(b);
                try {
                    console.warn(`Workflow.output called as output(nodeId, alias). Interpreting as output(alias,nodeId): '${alias}:${nodeId}'`);
                }
                catch { }
            }
            else {
                alias = String(a);
                nodeId = String(b);
            }
        }
        else {
            // single param variant: maybe "alias:node" or just node
            if (a.includes(':')) {
                const [al, id] = a.split(':');
                if (al && id) {
                    alias = al;
                    nodeId = id;
                }
                else {
                    nodeId = a;
                }
            }
            else {
                nodeId = a;
            }
        }
        if (!this.outputNodeIds.includes(nodeId))
            this.outputNodeIds.push(nodeId);
        if (alias) {
            this.outputAliases[nodeId] = alias;
        }
        return this; // typed refinement handled via declaration merging below
    }
    bypass(nodes) {
        if (!Array.isArray(nodes)) {
            nodes = [nodes];
        }
        for (const node of nodes) {
            if (!this.bypassedNodes.includes(node)) {
                this.bypassedNodes.push(node);
            }
        }
        return this;
    }
    reinstate(nodes) {
        if (!Array.isArray(nodes)) {
            nodes = [nodes];
        }
        for (const node of nodes) {
            const idx = this.bypassedNodes.indexOf(node);
            if (idx !== -1) {
                this.bypassedNodes.splice(idx, 1);
            }
        }
        return this;
    }
    /**
     * Update the structural hash after making non-dynamic changes to the workflow.
     * Call this if you modify the workflow structure after initialization and the autoHash was disabled,
     * or if you want to recalculate the hash after making structural changes.
     *
     * Example:
     * ```
     * const wf = Workflow.from(data, { autoHash: false });
     * wf.input('SAMPLER', 'ckpt_name', 'model_v1.safetensors');
     * wf.updateHash(); // Recompute hash after structural change
     * ```
     */
    updateHash() {
        this.structureHash = hashWorkflow(this.json);
        return this;
    }
    /** IDE helper returning empty object typed as final result (aliases + metadata). */
    typedResult() { return {}; }
}
// Helper: normalize to Blob for upload
function toBlob(src, fileName) {
    if (src instanceof Blob)
        return src;
    // Normalize everything to a plain ArrayBuffer for reliable BlobPart typing
    let ab;
    if (typeof Buffer !== 'undefined' && src instanceof Buffer) {
        const u8 = new Uint8Array(src);
        ab = u8.slice(0).buffer;
    }
    else if (src instanceof Uint8Array) {
        const u8 = new Uint8Array(src.byteLength);
        u8.set(src);
        ab = u8.buffer;
    }
    else if (src instanceof ArrayBuffer) {
        ab = src;
    }
    else {
        ab = new ArrayBuffer(0);
    }
    return new Blob([ab], { type: mimeFromName(fileName) });
}
function mimeFromName(name) {
    if (!name)
        return undefined;
    const n = name.toLowerCase();
    if (n.endsWith('.png'))
        return 'image/png';
    if (n.endsWith('.jpg') || n.endsWith('.jpeg'))
        return 'image/jpeg';
    if (n.endsWith('.webp'))
        return 'image/webp';
    return undefined;
}
//# sourceMappingURL=workflow.js.map
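workflow.js also carries the asset plumbing: attachImage() and attachFolderFiles() queue files in _pendingImageInputs/_pendingFolderFiles, normalizing Blob/Buffer/ArrayBuffer/Uint8Array input via toBlob() (which guesses a MIME type from the file extension); the upload itself happens later, when the workflow is run against a client, which is outside this file. A sketch of attaching assets before submission; the import path, file names and node id are illustrative assumptions:

import { readFile } from "node:fs/promises";
import { Workflow } from "comfyui-node/dist/multipool/workflow.js"; // path assumed

const wf = Workflow.from({
    "1": { class_type: "LoadImage", inputs: { image: "" } },
});

// Queue a single input image; per the doc comment it is uploaded on run() and the input is set to the filename.
const png = await readFile("./input.png");
wf.attachImage("1", "image", png, "input.png", { subfolder: "jobs", override: true });

// Queue a folder of reference files (useful for LoadImageSetFromFolderNode).
wf.attachFolderFiles("refs", [
    { data: png, fileName: "ref-0.png" },
    { data: new Uint8Array([137, 80, 78, 71]), fileName: "ref-1.png" },
]);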