@bian-womp/spark-workbench 0.3.36 → 0.3.38
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/cjs/index.cjs
CHANGED
@@ -652,15 +652,27 @@ class InMemoryWorkbench extends AbstractWorkbench {
         if (!outputTypeId || outputValue === undefined)
             return undefined;
         const unwrap = (v) => sparkGraph.isTypedOutput(v) ? sparkGraph.getTypedOutputValue(v) : v;
-        const coerceIfNeeded = async (fromType,
-            if (!
+        const coerceIfNeeded = async (fromType, toTypes, value) => {
+            if (!toTypes)
                 return value;
-
-
-
-
+            const typesArray = Array.isArray(toTypes) ? toTypes : [toTypes];
+            // If output type matches any input type exactly, no coercion needed
+            if (typesArray.includes(fromType))
+                return value;
+            // If no coercion function available, return as-is
+            if (!runner?.coerce)
                 return value;
+            // Try coercing to each type until one succeeds
+            for (const toType of typesArray) {
+                try {
+                    return await runner.coerce(fromType, toType, value);
+                }
+                catch {
+                    // Continue to next type
+                }
             }
+            // If all coercion attempts failed, return value as-is
+            return value;
         };
         const pos = nodePosition;
         const isArray = outputTypeId.endsWith("[]");
@@ -673,8 +685,8 @@ class InMemoryWorkbench extends AbstractWorkbench {
         let newNodeId;
         if (singleTarget) {
             const nodeDesc = registry.nodes.get(singleTarget.nodeTypeId);
-            const
-            const coerced = await coerceIfNeeded(outputTypeId,
+            const inTypes = sparkGraph.getInputDeclaredTypes(nodeDesc?.inputs, singleTarget.inputHandle);
+            const coerced = await coerceIfNeeded(outputTypeId, inTypes, unwrap(outputValue));
             newNodeId = this.addNode({
                 typeId: singleTarget.nodeTypeId,
             }, {
@@ -684,8 +696,8 @@ class InMemoryWorkbench extends AbstractWorkbench {
         }
         else if (isArray && arrTarget) {
             const nodeDesc = registry.nodes.get(arrTarget.nodeTypeId);
-            const
-            const coerced = await coerceIfNeeded(outputTypeId,
+            const inTypes = sparkGraph.getInputDeclaredTypes(nodeDesc?.inputs, arrTarget.inputHandle);
+            const coerced = await coerceIfNeeded(outputTypeId, inTypes, unwrap(outputValue));
             newNodeId = this.addNode({
                 typeId: arrTarget.nodeTypeId,
             }, {
@@ -695,10 +707,10 @@ class InMemoryWorkbench extends AbstractWorkbench {
         }
         else if (isArray && elemTarget) {
             const nodeDesc = registry.nodes.get(elemTarget.nodeTypeId);
-            const
+            const inTypes = sparkGraph.getInputDeclaredTypes(nodeDesc?.inputs, elemTarget.inputHandle);
             const src = unwrap(outputValue);
             const items = Array.isArray(src) ? src : [src];
-            const coercedItems = await Promise.all(items.map((v) => coerceIfNeeded(baseTypeId,
+            const coercedItems = await Promise.all(items.map((v) => coerceIfNeeded(baseTypeId, inTypes, v)));
             const COLS = 4;
             const DX = 180;
             const DY = 160;