vellum-ai 0.14.14__py3-none-any.whl → 0.14.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33) hide show
  1. vellum/client/core/client_wrapper.py +1 -1
  2. vellum/client/core/pydantic_utilities.py +4 -5
  3. vellum/client/resources/document_indexes/client.py +0 -55
  4. vellum/client/types/document_index_read.py +0 -10
  5. vellum/client/types/logical_operator.py +1 -0
  6. vellum/plugins/pydantic.py +14 -4
  7. vellum/workflows/nodes/core/retry_node/tests/test_node.py +23 -0
  8. vellum/workflows/nodes/displayable/api_node/tests/test_api_node.py +29 -0
  9. vellum/workflows/nodes/displayable/bases/api_node/node.py +2 -2
  10. vellum/workflows/vellum_client.py +9 -5
  11. {vellum_ai-0.14.14.dist-info → vellum_ai-0.14.16.dist-info}/METADATA +2 -2
  12. {vellum_ai-0.14.14.dist-info → vellum_ai-0.14.16.dist-info}/RECORD +33 -32
  13. vellum_cli/image_push.py +76 -42
  14. vellum_cli/tests/test_image_push.py +56 -0
  15. vellum_ee/workflows/display/nodes/base_node_display.py +8 -5
  16. vellum_ee/workflows/display/nodes/base_node_vellum_display.py +1 -4
  17. vellum_ee/workflows/display/nodes/get_node_display_class.py +34 -8
  18. vellum_ee/workflows/display/nodes/vellum/base_adornment_node.py +55 -1
  19. vellum_ee/workflows/display/nodes/vellum/retry_node.py +1 -54
  20. vellum_ee/workflows/display/nodes/vellum/templating_node.py +1 -8
  21. vellum_ee/workflows/display/nodes/vellum/try_node.py +1 -42
  22. vellum_ee/workflows/display/tests/test_vellum_workflow_display.py +47 -10
  23. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_adornments_serialization.py +29 -33
  24. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_attributes_serialization.py +91 -106
  25. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_outputs_serialization.py +33 -38
  26. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_ports_serialization.py +197 -145
  27. vellum_ee/workflows/display/tests/workflow_serialization/generic_nodes/test_trigger_serialization.py +23 -26
  28. vellum_ee/workflows/display/utils/vellum.py +3 -0
  29. vellum_ee/workflows/display/workflows/base_workflow_display.py +3 -6
  30. vellum_ee/workflows/display/workflows/tests/test_workflow_display.py +52 -0
  31. {vellum_ai-0.14.14.dist-info → vellum_ai-0.14.16.dist-info}/LICENSE +0 -0
  32. {vellum_ai-0.14.14.dist-info → vellum_ai-0.14.16.dist-info}/WHEEL +0 -0
  33. {vellum_ai-0.14.14.dist-info → vellum_ai-0.14.16.dist-info}/entry_points.txt +0 -0
vellum_cli/image_push.py CHANGED
@@ -8,7 +8,7 @@ import docker
8
8
  from docker import DockerClient
9
9
  from dotenv import load_dotenv
10
10
 
11
- from vellum.workflows.vellum_client import create_vellum_client
11
+ from vellum.workflows.vellum_client import create_vellum_client, create_vellum_environment
12
12
  from vellum_cli.logger import load_cli_logger
13
13
 
14
14
  _SUPPORTED_ARCHITECTURE = "amd64"
@@ -19,6 +19,21 @@ def image_push_command(image: str, tags: Optional[List[str]] = None) -> None:
19
19
  logger = load_cli_logger()
20
20
  vellum_client = create_vellum_client()
21
21
 
22
+ # Check if we are self hosted by looking at our base url
23
+ api_url = create_vellum_environment().default
24
+ is_self_hosted = not api_url.endswith("vellum.ai") and "localhost:" not in api_url and "127.0.0.1" not in api_url
25
+ if is_self_hosted:
26
+ logger.info("Self hosted install detected...")
27
+
28
+ if is_self_hosted and "/" not in image:
29
+ logger.error(
30
+ "For adding images to your self hosted install you must include the "
31
+ "repository address in the provided image name. You must also have "
32
+ "already pushed the image to the docker repository that your self "
33
+ "hosted install is using."
34
+ )
35
+ exit(1)
36
+
22
37
  # We're using docker python SDK here instead of subprocess since it connects to the docker host directly
23
38
  # instead of using the command line so it seemed like it would possibly be a little more robust since
24
39
  # it might avoid peoples' wonky paths, unfortunately it doesn't support the manifest command which we need for
@@ -29,59 +44,78 @@ def image_push_command(image: str, tags: Optional[List[str]] = None) -> None:
29
44
  docker_client = docker.from_env()
30
45
  check_architecture(docker_client, image, logger)
31
46
 
32
- logger.info("Authenticating...")
33
- auth = vellum_client.container_images.docker_service_token()
34
-
35
- docker_client.login(
36
- username="oauth2accesstoken",
37
- password=auth.access_token,
38
- registry=auth.repository,
39
- )
40
-
41
47
  repo_split = image.split("/")
42
48
  tag_split = repo_split[-1].split(":")
43
49
  image_name = tag_split[0]
44
50
  main_tag = tag_split[1] if len(tag_split) > 1 else "latest"
45
-
46
51
  all_tags = [main_tag, *(tags or [])]
47
- for tag in all_tags:
48
- vellum_image_name = f"{auth.repository}/{image_name}:{tag}"
49
-
50
- docker_client.api.tag(image, vellum_image_name)
51
-
52
- push_result = docker_client.images.push(repository=vellum_image_name, stream=True)
53
-
54
- # Here were trying to mime the output you would get from a normal docker push, which
55
- # the python sdk makes as hard as possible.
56
- for raw_line in push_result:
57
- try:
58
- for sub_line in raw_line.decode("utf-8").split("\r\n"):
59
- line = json.loads(sub_line)
60
- error_message = line.get("errorDetail", {}).get("message")
61
- status = line.get("status")
62
- id = line.get("id", "")
63
-
64
- if error_message:
65
- logger.error(error_message)
66
- exit(1)
67
- elif status == "Waiting":
68
- continue
69
- elif status:
70
- logger.info(f"{id}{': ' if id else ''}{status}")
71
- else:
72
- logger.info(line)
73
- except Exception:
74
- continue
75
52
 
76
- result = subprocess.run(
53
+ if is_self_hosted:
54
+ # Include the repo for self hosted since we skip pushing it to our internal repo and expect
55
+ # the user to push it themselves and provide us with the repo name.
56
+ image_name = image.split(":")[0]
57
+
58
+ logger.info("Checking if image already exists in repository...")
59
+ exists_result = subprocess.run(
60
+ ["docker", "manifest", "inspect", image],
61
+ stdout=subprocess.PIPE,
62
+ stderr=subprocess.PIPE,
63
+ )
64
+
65
+ if exists_result.returncode != 0:
66
+ logger.error(
67
+ "Image does not exist in repository. Push the image to the "
68
+ "repository your self hosted install is using and try again."
69
+ )
70
+ exit(1)
71
+ else:
72
+ logger.info("Authenticating...")
73
+ auth = vellum_client.container_images.docker_service_token()
74
+
75
+ docker_client.login(
76
+ username="oauth2accesstoken",
77
+ password=auth.access_token,
78
+ registry=auth.repository,
79
+ )
80
+
81
+ for tag in all_tags:
82
+ vellum_image_name = f"{auth.repository}/{image_name}:{tag}"
83
+
84
+ docker_client.api.tag(image, vellum_image_name)
85
+
86
+ push_result = docker_client.images.push(repository=vellum_image_name, stream=True)
87
+
88
+ # Here we're trying to mimic the output you would get from a normal docker push, which
89
+ # the python sdk makes as hard as possible.
90
+ for raw_line in push_result:
91
+ try:
92
+ for sub_line in raw_line.decode("utf-8").split("\r\n"):
93
+ line = json.loads(sub_line)
94
+ error_message = line.get("errorDetail", {}).get("message")
95
+ status = line.get("status")
96
+ id = line.get("id", "")
97
+
98
+ if error_message:
99
+ logger.error(error_message)
100
+ exit(1)
101
+ elif status == "Waiting":
102
+ continue
103
+ elif status:
104
+ logger.info(f"{id}{': ' if id else ''}{status}")
105
+ else:
106
+ logger.info(line)
107
+ except Exception:
108
+ continue
109
+
110
+ inspect_result = subprocess.run(
77
111
  ["docker", "inspect", "--format='{{index .RepoDigests 0}}'", image],
78
112
  stdout=subprocess.PIPE,
79
113
  stderr=subprocess.PIPE,
80
114
  )
81
115
 
82
116
  sha = ""
83
- if result.returncode == 0:
84
- match = re.search(r"sha256[^']*", result.stdout.decode("utf-8"))
117
+ if inspect_result.returncode == 0:
118
+ match = re.search(r"sha256[^']*", inspect_result.stdout.decode("utf-8"))
85
119
  if match and match.group(0):
86
120
  sha = match.group(0)
87
121
 
@@ -0,0 +1,56 @@
1
+ import subprocess
2
+ from unittest.mock import MagicMock, patch
3
+
4
+ from click.testing import CliRunner
5
+
6
+ from vellum_cli import main as cli_main
7
+
8
+
9
+ @patch("subprocess.run")
10
+ @patch("docker.from_env")
11
+ def test_image_push__self_hosted_happy_path(mock_docker_from_env, mock_run, vellum_client, monkeypatch):
12
+ # GIVEN a self hosted vellum api URL env var
13
+ monkeypatch.setenv("VELLUM_API_URL", "mycompany.api.com")
14
+
15
+ # Mock Docker client
16
+ mock_docker_client = MagicMock()
17
+ mock_docker_from_env.return_value = mock_docker_client
18
+
19
+ mock_run.side_effect = [
20
+ subprocess.CompletedProcess(
21
+ args="", returncode=0, stdout=b'{"manifests": [{"platform": {"architecture": "amd64"}}]}'
22
+ ),
23
+ subprocess.CompletedProcess(args="", returncode=0, stdout=b"manifest"),
24
+ subprocess.CompletedProcess(args="", returncode=0, stdout=b"sha256:hellosha"),
25
+ ]
26
+
27
+ # WHEN the user runs the image push command
28
+ runner = CliRunner()
29
+ result = runner.invoke(cli_main, ["image", "push", "myrepo.net/myimage:latest"])
30
+
31
+ # THEN the command exits successfully
32
+ assert result.exit_code == 0, result.output
33
+
34
+ # AND gives the success message
35
+ assert "Image successfully pushed" in result.output
36
+
37
+
38
+ @patch("subprocess.run")
39
+ @patch("docker.from_env")
40
+ def test_image_push__self_hosted_blocks_repo(mock_docker_from_env, mock_run, vellum_client, monkeypatch):
41
+ # GIVEN a self hosted vellum api URL env var
42
+ monkeypatch.setenv("VELLUM_API_URL", "mycompany.api.com")
43
+
44
+ # Mock Docker client
45
+ mock_docker_client = MagicMock()
46
+ mock_docker_from_env.return_value = mock_docker_client
47
+
48
+ # WHEN the user runs the image push command
49
+ runner = CliRunner()
50
+ result = runner.invoke(cli_main, ["image", "push", "myimage"])
51
+
52
+ # THEN the command exits unsuccessfully
53
+ assert result.exit_code == 1, result.output
54
+
55
+ # AND gives the error message for self hosted installs not including the repo
56
+ assert "For adding images to your self hosted install you must include" in result.output
@@ -4,6 +4,7 @@ from uuid import UUID
4
4
  from typing import (
5
5
  TYPE_CHECKING,
6
6
  Any,
7
+ ClassVar,
7
8
  Dict,
8
9
  ForwardRef,
9
10
  Generic,
@@ -59,17 +60,20 @@ _NodeDisplayAttrType = TypeVar("_NodeDisplayAttrType")
59
60
  class BaseNodeDisplayMeta(type):
60
61
  def __new__(mcs, name: str, bases: Tuple[Type, ...], dct: Dict[str, Any]) -> Any:
61
62
  cls = super().__new__(mcs, name, bases, dct)
63
+ return cls.__annotate_node__()
64
+
65
+ def __annotate_node__(cls):
62
66
  base_node_display_class = cast(Type["BaseNodeDisplay"], cls)
63
67
  node_class = base_node_display_class.infer_node_class()
64
68
  if not issubclass(node_class, BaseNode):
65
69
  return cls
66
70
 
67
- display_node_id = dct.get("node_id")
71
+ display_node_id = getattr(cls, "node_id", None)
68
72
  if isinstance(display_node_id, UUID):
69
73
  # Display classes are able to override the id of the node class it's parameterized by
70
74
  node_class.__id__ = display_node_id
71
75
 
72
- output_display = dct.get("output_display")
76
+ output_display = getattr(cls, "output_display", None)
73
77
  if isinstance(output_display, dict):
74
78
  # And the node class' output ids
75
79
  for reference, node_output_display in output_display.items():
@@ -86,6 +90,7 @@ class BaseNodeDisplayMeta(type):
86
90
  class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
87
91
  output_display: Dict[OutputReference, NodeOutputDisplay] = {}
88
92
  port_displays: Dict[Port, PortDisplayOverrides] = {}
93
+ node_input_ids_by_name: ClassVar[Dict[str, UUID]] = {}
89
94
 
90
95
  # Used to store the mapping between node types and their display classes
91
96
  _node_display_registry: Dict[Type[NodeType], Type["BaseNodeDisplay"]] = {}
@@ -326,6 +331,7 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
326
331
  IsNotNilExpression,
327
332
  IsUndefinedExpression,
328
333
  IsNotUndefinedExpression,
334
+ ParseJsonExpression,
329
335
  ),
330
336
  ):
331
337
  lhs = self.serialize_value(display_context, condition._expression)
@@ -403,9 +409,6 @@ class BaseNodeDisplay(Generic[NodeType], metaclass=BaseNodeDisplayMeta):
403
409
  "node_id": str(node_class_display.node_id),
404
410
  }
405
411
 
406
- if isinstance(value, ParseJsonExpression):
407
- raise ValueError("ParseJsonExpression is not supported in the UI")
408
-
409
412
  if not isinstance(value, BaseDescriptor):
410
413
  vellum_value = primitive_to_vellum_value(value)
411
414
  return {
@@ -1,5 +1,5 @@
1
1
  from uuid import UUID
2
- from typing import ClassVar, Dict, Optional, Union
2
+ from typing import ClassVar, Dict, Optional
3
3
 
4
4
  from vellum.workflows.nodes.utils import get_unadorned_node
5
5
  from vellum.workflows.ports import Port
@@ -17,9 +17,6 @@ class BaseNodeVellumDisplay(BaseNodeDisplay[NodeType]):
17
17
  # Used to explicitly set the target handle id for a node
18
18
  target_handle_id: ClassVar[Optional[UUID]] = None
19
19
 
20
- # Used to explicitly set the node input ids by name for a node
21
- node_input_ids_by_name: ClassVar[Dict[str, Union[UUID, str]]] = {}
22
-
23
20
  def _get_node_display_uuid(self, attribute: str) -> UUID:
24
21
  explicit_value = self._get_explicit_node_display_attr(attribute, UUID)
25
22
  return explicit_value if explicit_value else uuid4_from_hash(f"{self.node_id}|{attribute}")
@@ -1,7 +1,10 @@
1
1
  import types
2
- from typing import TYPE_CHECKING, Optional, Type
2
+ from uuid import UUID
3
+ from typing import TYPE_CHECKING, Any, Dict, Optional, Type
3
4
 
5
+ from vellum.workflows.descriptors.base import BaseDescriptor
4
6
  from vellum.workflows.types.generics import NodeType
7
+ from vellum.workflows.utils.uuids import uuid4_from_hash
5
8
  from vellum_ee.workflows.display.nodes.types import NodeOutputDisplay
6
9
 
7
10
  if TYPE_CHECKING:
@@ -26,16 +29,39 @@ def get_node_display_class(
26
29
 
27
30
  # `base_node_display_class` is always a Generic class, so it's safe to index into it
28
31
  NodeDisplayBaseClass = base_node_display_class[node_class] # type: ignore[index]
32
+
33
+ def _get_node_input_ids_by_ref(path: str, inst: Any):
34
+ if isinstance(inst, dict):
35
+ node_input_ids_by_name: Dict[str, UUID] = {}
36
+ for key, value in inst.items():
37
+ node_input_ids_by_name.update(_get_node_input_ids_by_ref(f"{path}.{key}", value))
38
+ return node_input_ids_by_name
39
+
40
+ if isinstance(inst, BaseDescriptor):
41
+ return {path: uuid4_from_hash(f"{node_class.__id__}|{path}")}
42
+
43
+ return {}
44
+
45
+ def exec_body(ns: Dict):
46
+ output_display = {
47
+ ref: NodeOutputDisplay(id=node_class.__output_ids__[ref.name], name=ref.name)
48
+ for ref in node_class.Outputs
49
+ if ref.name in node_class.__output_ids__
50
+ }
51
+ if output_display:
52
+ ns["output_display"] = output_display
53
+
54
+ node_input_ids_by_name: Dict[str, UUID] = {}
55
+ for ref in node_class:
56
+ node_input_ids_by_name.update(_get_node_input_ids_by_ref(ref.name, ref.instance))
57
+
58
+ if node_input_ids_by_name:
59
+ ns["node_input_ids_by_name"] = node_input_ids_by_name
60
+
29
61
  NodeDisplayClass = types.new_class(
30
62
  f"{node_class.__name__}Display",
31
63
  bases=(NodeDisplayBaseClass,),
64
+ exec_body=exec_body,
32
65
  )
33
- output_display = {
34
- ref: NodeOutputDisplay(id=node_class.__output_ids__[ref.name], name=ref.name)
35
- for ref in node_class.Outputs
36
- if ref.name in node_class.__output_ids__
37
- }
38
- if output_display:
39
- setattr(NodeDisplayClass, "output_display", output_display)
40
66
 
41
67
  return NodeDisplayClass
@@ -1,9 +1,14 @@
1
+ import re
2
+ import types
1
3
  from uuid import UUID
2
- from typing import Any, Callable, Generic, Optional, TypeVar, cast
4
+ from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar, cast
3
5
 
6
+ from vellum.workflows.nodes.bases.base import BaseNode
4
7
  from vellum.workflows.nodes.bases.base_adornment_node import BaseAdornmentNode
5
8
  from vellum.workflows.nodes.utils import get_wrapped_node
6
9
  from vellum.workflows.types.core import JsonArray, JsonObject
10
+ from vellum.workflows.types.utils import get_original_base
11
+ from vellum.workflows.utils.uuids import uuid4_from_hash
7
12
  from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
8
13
  from vellum_ee.workflows.display.nodes.base_node_vellum_display import BaseNodeVellumDisplay
9
14
  from vellum_ee.workflows.display.nodes.get_node_display_class import get_node_display_class
@@ -37,3 +42,52 @@ class BaseAdornmentNodeDisplay(BaseNodeVellumDisplay[_BaseAdornmentNodeType], Ge
37
42
  serialized_wrapped_node["adornments"] = adornments + [adornment] if adornment else adornments
38
43
 
39
44
  return serialized_wrapped_node
45
+
46
+ @classmethod
47
+ def wrap(cls, **kwargs: Any) -> Callable[..., Type[BaseNodeDisplay]]:
48
+ NodeDisplayType = TypeVar("NodeDisplayType", bound=BaseNodeDisplay)
49
+
50
+ def decorator(inner_cls: Type[NodeDisplayType]) -> Type[NodeDisplayType]:
51
+ node_class = inner_cls.infer_node_class()
52
+ wrapped_node_class = cast(Type[BaseNode], node_class.__wrapped_node__)
53
+
54
+ # `mypy` is wrong here, `cls` is indexable bc it's Generic
55
+ BaseAdornmentDisplay = cls[node_class] # type: ignore[index]
56
+
57
+ def exec_body(ns: Dict):
58
+ for key, kwarg in kwargs.items():
59
+ ns[key] = kwarg
60
+
61
+ if "node_id" not in kwargs:
62
+ ns["node_id"] = uuid4_from_hash(node_class.__qualname__)
63
+
64
+ AdornmentDisplay = types.new_class(
65
+ re.sub(r"^Base", "", cls.__name__), bases=(BaseAdornmentDisplay,), exec_body=exec_body
66
+ )
67
+
68
+ setattr(inner_cls, "__adorned_by__", AdornmentDisplay)
69
+
70
+ # We must edit the node display class to use __wrapped_node__ everywhere it
71
+ # references the adorned node class, which is three places:
72
+
73
+ # 1. The node display class' parameterized type
74
+ original_base_node_display = get_original_base(inner_cls)
75
+ original_base_node_display.__args__ = (wrapped_node_class,)
76
+ inner_cls._node_display_registry[wrapped_node_class] = inner_cls
77
+ inner_cls.__annotate_node__()
78
+
79
+ # 2. The node display class' output displays
80
+ old_outputs = list(inner_cls.output_display.keys())
81
+ for old_output in old_outputs:
82
+ new_output = getattr(wrapped_node_class.Outputs, old_output.name)
83
+ inner_cls.output_display[new_output] = inner_cls.output_display.pop(old_output)
84
+
85
+ # 3. The node display class' port displays
86
+ old_ports = list(inner_cls.port_displays.keys())
87
+ for old_port in old_ports:
88
+ new_port = getattr(wrapped_node_class.Ports, old_port.name)
89
+ inner_cls.port_displays[new_port] = inner_cls.port_displays.pop(old_port)
90
+
91
+ return inner_cls
92
+
93
+ return decorator
@@ -1,14 +1,12 @@
1
1
  import inspect
2
- from typing import Any, Callable, Generic, Optional, Tuple, Type, TypeVar, cast
2
+ from typing import Any, Generic, Tuple, Type, TypeVar, cast
3
3
 
4
4
  from vellum.workflows.descriptors.base import BaseDescriptor
5
- from vellum.workflows.errors.types import WorkflowErrorCode
6
5
  from vellum.workflows.nodes.bases.base import BaseNode
7
6
  from vellum.workflows.nodes.core.retry_node.node import RetryNode
8
7
  from vellum.workflows.nodes.utils import ADORNMENT_MODULE_NAME
9
8
  from vellum.workflows.references.output import OutputReference
10
9
  from vellum.workflows.types.core import JsonArray, JsonObject
11
- from vellum.workflows.types.utils import get_original_base
12
10
  from vellum.workflows.utils.uuids import uuid4_from_hash
13
11
  from vellum.workflows.workflows.base import BaseWorkflow
14
12
  from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
@@ -79,54 +77,3 @@ class BaseRetryNodeDisplay(BaseAdornmentNodeDisplay[_RetryNodeType], Generic[_Re
79
77
 
80
78
  inner_output = getattr(inner_node.Outputs, output.name)
81
79
  return node_display.get_node_output_display(inner_output)
82
-
83
- @classmethod
84
- def wrap(
85
- cls,
86
- max_attempts: int,
87
- delay: Optional[float] = None,
88
- retry_on_error_code: Optional[WorkflowErrorCode] = None,
89
- retry_on_condition: Optional[BaseDescriptor] = None,
90
- ) -> Callable[..., Type["BaseRetryNodeDisplay"]]:
91
- _max_attempts = max_attempts
92
- _delay = delay
93
- _retry_on_error_code = retry_on_error_code
94
- _retry_on_condition = retry_on_condition
95
-
96
- NodeDisplayType = TypeVar("NodeDisplayType", bound=BaseNodeDisplay)
97
-
98
- def decorator(inner_cls: Type[NodeDisplayType]) -> Type[NodeDisplayType]:
99
- node_class = inner_cls.infer_node_class()
100
- wrapped_node_class = cast(Type[BaseNode], node_class.__wrapped_node__)
101
-
102
- class RetryNodeDisplay(BaseRetryNodeDisplay[node_class]): # type: ignore[valid-type]
103
- max_attempts = _max_attempts
104
- delay = _delay
105
- retry_on_error_code = _retry_on_error_code
106
- retry_on_condition = _retry_on_condition
107
-
108
- setattr(inner_cls, "__adorned_by__", RetryNodeDisplay)
109
-
110
- # We must edit the node display class to use __wrapped_node__ everywhere it
111
- # references the adorned node class, which is three places:
112
-
113
- # 1. The node display class' parameterized type
114
- original_base_node_display = get_original_base(inner_cls)
115
- original_base_node_display.__args__ = (wrapped_node_class,)
116
- inner_cls._node_display_registry[wrapped_node_class] = inner_cls
117
-
118
- # 2. The node display class' output displays
119
- old_outputs = list(inner_cls.output_display.keys())
120
- for old_output in old_outputs:
121
- new_output = getattr(wrapped_node_class.Outputs, old_output.name)
122
- inner_cls.output_display[new_output] = inner_cls.output_display.pop(old_output)
123
-
124
- # 3. The node display class' port displays
125
- old_ports = list(inner_cls.port_displays.keys())
126
- for old_port in old_ports:
127
- new_port = getattr(wrapped_node_class.Ports, old_port.name)
128
- inner_cls.port_displays[new_port] = inner_cls.port_displays.pop(old_port)
129
-
130
- return inner_cls
131
-
132
- return decorator
@@ -23,14 +23,7 @@ class BaseTemplatingNodeDisplay(BaseNodeVellumDisplay[_TemplatingNodeType], Gene
23
23
  node = self._node
24
24
  node_id = self.node_id
25
25
 
26
- template_input_id = self.template_input_id or next(
27
- (
28
- UUID(input_id) if isinstance(input_id, str) else input_id
29
- for input_name, input_id in self.node_input_ids_by_name.items()
30
- if input_name == TEMPLATE_INPUT_NAME
31
- ),
32
- None,
33
- )
26
+ template_input_id = self.template_input_id or self.node_input_ids_by_name.get(TEMPLATE_INPUT_NAME)
34
27
 
35
28
  template_node_input = create_node_input(
36
29
  node_id=node_id,
@@ -1,6 +1,6 @@
1
1
  import inspect
2
2
  from uuid import UUID
3
- from typing import Any, Callable, ClassVar, Generic, Optional, Tuple, Type, TypeVar, cast
3
+ from typing import Any, ClassVar, Generic, Optional, Tuple, Type, TypeVar, cast
4
4
 
5
5
  from vellum.workflows.descriptors.base import BaseDescriptor
6
6
  from vellum.workflows.nodes.bases.base import BaseNode
@@ -8,7 +8,6 @@ from vellum.workflows.nodes.core.try_node.node import TryNode
8
8
  from vellum.workflows.nodes.utils import ADORNMENT_MODULE_NAME
9
9
  from vellum.workflows.references.output import OutputReference
10
10
  from vellum.workflows.types.core import JsonArray, JsonObject
11
- from vellum.workflows.types.utils import get_original_base
12
11
  from vellum.workflows.utils.uuids import uuid4_from_hash
13
12
  from vellum.workflows.workflows.base import BaseWorkflow
14
13
  from vellum_ee.workflows.display.nodes.base_node_display import BaseNodeDisplay
@@ -93,43 +92,3 @@ class BaseTryNodeDisplay(BaseAdornmentNodeDisplay[_TryNodeType], Generic[_TryNod
93
92
 
94
93
  inner_output = getattr(inner_node.Outputs, output.name)
95
94
  return node_display.get_node_output_display(inner_output)
96
-
97
- @classmethod
98
- def wrap(cls, error_output_id: Optional[UUID] = None) -> Callable[..., Type["BaseTryNodeDisplay"]]:
99
- _error_output_id = error_output_id
100
-
101
- NodeDisplayType = TypeVar("NodeDisplayType", bound=BaseNodeDisplay)
102
-
103
- def decorator(inner_cls: Type[NodeDisplayType]) -> Type[NodeDisplayType]:
104
- node_class = inner_cls.infer_node_class()
105
- wrapped_node_class = cast(Type[BaseNode], node_class.__wrapped_node__)
106
-
107
- # Mypy gets mad about dynamic parameter types like this, but it's fine
108
- class TryNodeDisplay(BaseTryNodeDisplay[node_class]): # type: ignore[valid-type]
109
- error_output_id = _error_output_id
110
-
111
- setattr(inner_cls, "__adorned_by__", TryNodeDisplay)
112
-
113
- # We must edit the node display class to use __wrapped_node__ everywhere it
114
- # references the adorned node class, which is three places:
115
-
116
- # 1. The node display class' parameterized type
117
- original_base_node_display = get_original_base(inner_cls)
118
- original_base_node_display.__args__ = (wrapped_node_class,)
119
- inner_cls._node_display_registry[wrapped_node_class] = inner_cls
120
-
121
- # 2. The node display class' output displays
122
- old_outputs = list(inner_cls.output_display.keys())
123
- for old_output in old_outputs:
124
- new_output = getattr(wrapped_node_class.Outputs, old_output.name)
125
- inner_cls.output_display[new_output] = inner_cls.output_display.pop(old_output)
126
-
127
- # 3. The node display class' port displays
128
- old_ports = list(inner_cls.port_displays.keys())
129
- for old_port in old_ports:
130
- new_port = getattr(wrapped_node_class.Ports, old_port.name)
131
- inner_cls.port_displays[new_port] = inner_cls.port_displays.pop(old_port)
132
-
133
- return inner_cls
134
-
135
- return decorator
@@ -1,4 +1,3 @@
1
- import pytest
2
1
  from uuid import UUID
3
2
  from typing import Dict
4
3
 
@@ -231,11 +230,7 @@ def test_vellum_workflow_display__serialize_with_unused_nodes_and_edges():
231
230
  assert edge_found, "Edge between unused nodes NodeB and NodeC not found in serialized output"
232
231
 
233
232
 
234
- def test_parse_json_not_supported_in_ui():
235
- """
236
- Test that verifies ParseJsonExpression is not yet supported in the UI.
237
- This test should fail once UI support is added, at which point it should be updated.
238
- """
233
+ def test_vellum_workflow_display__serialize_with_parse_json_expression():
239
234
  # GIVEN a workflow that uses the parse_json function
240
235
  from vellum.workflows.references.constant import ConstantValueReference
241
236
 
@@ -249,13 +244,55 @@ def test_parse_json_not_supported_in_ui():
249
244
  class Outputs(BaseWorkflow.Outputs):
250
245
  final = JsonNode.Outputs.json_result
251
246
 
252
- # WHEN we attempt to serialize it
247
+ # AND a display class for this workflow
253
248
  workflow_display = get_workflow_display(
254
249
  base_display_class=VellumWorkflowDisplay,
255
250
  workflow_class=Workflow,
256
251
  )
257
252
 
258
- with pytest.raises(ValueError) as exc_info:
259
- workflow_display.serialize()
253
+ # WHEN we serialize the workflow
254
+ exec_config = workflow_display.serialize()
255
+
256
+ # THEN the serialized workflow contains the parse_json expression
257
+ raw_data = exec_config["workflow_raw_data"]
258
+ assert isinstance(raw_data, dict)
260
259
 
261
- assert "ParseJsonExpression is not supported in the UI" == str(exc_info.value)
260
+ nodes = raw_data["nodes"]
261
+ assert isinstance(nodes, list)
262
+
263
+ json_node = None
264
+ for node in nodes:
265
+ assert isinstance(node, dict)
266
+ definition = node.get("definition")
267
+ if node.get("type") == "GENERIC" and isinstance(definition, dict) and definition.get("name") == "JsonNode":
268
+ json_node = node
269
+ break
270
+
271
+ assert json_node is not None
272
+
273
+ outputs = json_node.get("outputs", [])
274
+ assert isinstance(outputs, list)
275
+
276
+ json_result = None
277
+ for output in outputs:
278
+ assert isinstance(output, dict)
279
+ if output.get("name") == "json_result":
280
+ json_result = output
281
+ break
282
+
283
+ assert json_result == {
284
+ "id": "44c7d94c-a76a-4151-9b95-85a31764f18f",
285
+ "name": "json_result",
286
+ "type": "JSON",
287
+ "value": {
288
+ "type": "UNARY_EXPRESSION",
289
+ "lhs": {
290
+ "type": "CONSTANT_VALUE",
291
+ "value": {
292
+ "type": "STRING",
293
+ "value": '{"key": "value"}',
294
+ },
295
+ },
296
+ "operator": "parseJson",
297
+ },
298
+ }