vellum-ai 1.0.5__py3-none-any.whl → 1.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. vellum/__init__.py +0 -8
  2. vellum/client/core/client_wrapper.py +2 -2
  3. vellum/client/types/__init__.py +0 -8
  4. vellum/client/types/organization_read.py +1 -2
  5. vellum/workflows/events/context.py +111 -0
  6. vellum/workflows/integrations/__init__.py +0 -0
  7. vellum/workflows/integrations/composio_service.py +138 -0
  8. vellum/workflows/nodes/displayable/bases/api_node/node.py +27 -9
  9. vellum/workflows/nodes/displayable/bases/api_node/tests/__init__.py +0 -0
  10. vellum/workflows/nodes/displayable/bases/api_node/tests/test_node.py +47 -0
  11. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_composio_service.py +63 -58
  12. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +49 -38
  13. vellum/workflows/types/definition.py +4 -2
  14. {vellum_ai-1.0.5.dist-info → vellum_ai-1.0.6.dist-info}/METADATA +1 -3
  15. {vellum_ai-1.0.5.dist-info → vellum_ai-1.0.6.dist-info}/RECORD +23 -27
  16. vellum_cli/push.py +11 -2
  17. vellum_cli/tests/test_push.py +57 -1
  18. vellum_ee/workflows/display/nodes/vellum/code_execution_node.py +2 -0
  19. vellum_ee/workflows/display/nodes/vellum/tests/test_code_execution_node.py +16 -0
  20. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py +3 -0
  21. vellum/client/types/name_enum.py +0 -7
  22. vellum/client/types/organization_limit_config.py +0 -25
  23. vellum/client/types/quota.py +0 -22
  24. vellum/client/types/vembda_service_tier_enum.py +0 -5
  25. vellum/types/name_enum.py +0 -3
  26. vellum/types/organization_limit_config.py +0 -3
  27. vellum/types/quota.py +0 -3
  28. vellum/types/vembda_service_tier_enum.py +0 -3
  29. vellum/workflows/nodes/displayable/tool_calling_node/composio_service.py +0 -83
  30. {vellum_ai-1.0.5.dist-info → vellum_ai-1.0.6.dist-info}/LICENSE +0 -0
  31. {vellum_ai-1.0.5.dist-info → vellum_ai-1.0.6.dist-info}/WHEEL +0 -0
  32. {vellum_ai-1.0.5.dist-info → vellum_ai-1.0.6.dist-info}/entry_points.txt +0 -0
vellum_cli/tests/test_push.py
@@ -13,6 +13,7 @@ from vellum.client.core.api_error import ApiError
  from vellum.client.types.workflow_push_response import WorkflowPushResponse
  from vellum.utils.uuid import is_valid_uuid
  from vellum_cli import main as cli_main
+ from vellum_ee.workflows.display.nodes.utils import to_kebab_case


  def _extract_tar_gz(tar_gz_bytes: bytes) -> dict[str, str]:
@@ -391,7 +392,9 @@ def test_push__deployment(mock_module, vellum_client, base_command):
      assert json.loads(call_args["exec_config"])["workflow_raw_data"]["definition"]["name"] == "ExampleWorkflow"
      assert is_valid_uuid(call_args["workflow_sandbox_id"])
      assert call_args["artifact"].name == expected_artifact_name
-     assert call_args["deployment_config"] == "{}"
+     expected_deployment_name = to_kebab_case(module.split(".")[-1])
+     deployment_config = json.loads(call_args["deployment_config"])
+     assert deployment_config["name"] == expected_deployment_name

      extracted_files = _extract_tar_gz(call_args["artifact"].read())
      assert extracted_files["workflow.py"] == workflow_py_file_content
@@ -1039,3 +1042,56 @@ def test_push__deploy_with_release_tags_success(mock_module, vellum_client):
      # AND should show success message
      assert "Successfully pushed" in result.output
      assert "Updated vellum.lock.json file." in result.output
+
+
+ def test_push__deploy_stores_deployment_config_in_lock_file(mock_module, vellum_client):
+     # GIVEN a single workflow
+     temp_dir = mock_module.temp_dir
+     module = mock_module.module
+
+     # AND a workflow exists in the module successfully
+     _ensure_workflow_py(temp_dir, module)
+
+     # AND the push API call returns successfully with a deployment
+     workflow_deployment_id = str(uuid4())
+     vellum_client.workflows.push.return_value = WorkflowPushResponse(
+         workflow_sandbox_id=str(uuid4()),
+         workflow_deployment_id=workflow_deployment_id,
+     )
+
+     # WHEN calling `vellum workflows push --deploy` for the first time
+     runner = CliRunner()
+     result = runner.invoke(cli_main, ["workflows", "push", module, "--deploy"])
+
+     # THEN it should succeed
+     assert result.exit_code == 0, result.output
+
+     # AND the deployment config should be stored in the lock file with the deployment ID and module name
+     with open(os.path.join(temp_dir, "vellum.lock.json")) as f:
+         lock_data = json.loads(f.read())
+         assert len(lock_data["workflows"][0]["deployments"]) == 1
+         deployment = lock_data["workflows"][0]["deployments"][0]
+         assert str(deployment["id"]) == workflow_deployment_id
+         assert deployment["name"] == "test-push-deploy-stores-deployment-config-in-lock-file"
+         assert deployment.get("label") is None
+         assert deployment.get("description") is None
+         assert deployment.get("release_tags") is None
+
+     # AND when we do a second push
+     vellum_client.workflows.push.reset_mock()
+     vellum_client.workflows.push.return_value = WorkflowPushResponse(
+         workflow_sandbox_id=str(uuid4()),
+         workflow_deployment_id=workflow_deployment_id,
+     )
+
+     result = runner.invoke(cli_main, ["workflows", "push", module, "--deploy"])
+
+     # THEN it should succeed
+     assert result.exit_code == 0, result.output
+
+     # AND we should have called the push API with the module name as deployment name
+     vellum_client.workflows.push.assert_called_once()
+     call_args = vellum_client.workflows.push.call_args.kwargs
+     deployment_config_str = call_args["deployment_config"]
+     deployment_config = json.loads(deployment_config_str)
+     assert deployment_config["name"] == "test-push-deploy-stores-deployment-config-in-lock-file"
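The assertions above derive the expected deployment name by kebab-casing the last segment of the module path, so a module named test_push_deploy_stores_deployment_config_in_lock_file is expected to deploy as test-push-deploy-stores-deployment-config-in-lock-file. The real helper is imported from vellum_ee.workflows.display.nodes.utils and is not shown in this diff; the following is only a minimal sketch of the behavior these tests rely on, under the assumption that it normalizes snake_case (and possibly CamelCase) names:

import re


def to_kebab_case_sketch(name: str) -> str:
    """Minimal sketch of the behavior the tests above assume; the real
    vellum_ee implementation may differ in edge cases."""
    # Insert a hyphen at lowercase/digit-to-uppercase boundaries (assumed CamelCase handling).
    spaced = re.sub(r"(?<=[a-z0-9])(?=[A-Z])", "-", name)
    # Normalize underscores, spaces, and repeated separators to single hyphens, then lowercase.
    return re.sub(r"[-_\s]+", "-", spaced).strip("-").lower()


assert to_kebab_case_sketch("test_push_deploy_stores_deployment_config_in_lock_file") == (
    "test-push-deploy-stores-deployment-config-in-lock-file"
)
assert to_kebab_case_sketch("ExampleWorkflow") == "example-workflow"  # assumed CamelCase case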
vellum_ee/workflows/display/nodes/vellum/code_execution_node.py
@@ -40,6 +40,8 @@ class BaseCodeExecutionNodeDisplay(BaseNodeDisplay[_CodeExecutionNodeType], Gene
                  node_filepath=node_file_path,
                  script_filepath=filepath,
              )
+             if not file_code:
+                 raise Exception(f"Filepath '{filepath}' of node {node.__name__} does not exist")
              code_value = file_code
          else:
              code_value = ""
vellum_ee/workflows/display/nodes/vellum/tests/test_code_execution_node.py
@@ -153,3 +153,19 @@ def test_serialize_node__with_unresolved_secret_references(vellum_client):
      # warnings = list(workflow_display.errors)
      # assert len(warnings) == 1
      # assert "Failed to resolve secret reference 'MY_API_KEY'" in str(warnings[0])
+
+
+ def test_serialize_node__with_non_exist_code_input_path():
+     # GIVEN a code node with a non-existent code input path
+     class MyNode(CodeExecutionNode):
+         filepath = "non_existent_file.py"
+
+     # AND a workflow with the code node
+     class Workflow(BaseWorkflow):
+         graph = MyNode
+
+     # WHEN we serialize the workflow
+     workflow_display = get_workflow_display(workflow_class=Workflow)
+     with pytest.raises(Exception) as exc_info:
+         workflow_display.serialize()
+     assert "Filepath 'non_existent_file.py' of node MyNode does not exist" in str(exc_info.value)
vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_composio_serialization.py
@@ -54,6 +54,9 @@ def test_serialize_workflow():
          "action": "GITHUB_CREATE_AN_ISSUE",
          "description": "Create a new issue in a GitHub repository",
          "display_name": "Create GitHub Issue",
+         "parameters": None,
+         "version": None,
+         "tags": None,
      }

      # AND the rest of the node structure should be correct
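The three new keys appear to mirror optional fields added to the Composio tool definition type in vellum/workflows/types/definition.py (+4 -2), which is not shown in this diff. The sketch below is a hypothetical dataclass, inferred only from the serialized keys asserted above, illustrating why the output now carries parameters, version, and tags defaulting to None:

from dataclasses import dataclass
from typing import Any, Dict, List, Optional


@dataclass
class ComposioToolDefinitionSketch:
    """Hypothetical shape for illustration; the real type lives in
    vellum/workflows/types/definition.py and may differ."""

    toolkit: str
    action: str
    description: str
    display_name: Optional[str] = None
    # Assumed new optional fields, inferred from the serialized keys asserted above.
    parameters: Optional[Dict[str, Any]] = None
    version: Optional[str] = None
    tags: Optional[List[str]] = None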
vellum/client/types/name_enum.py DELETED
@@ -1,7 +0,0 @@
- # This file was auto-generated by Fern from our API Definition.
-
- import typing
-
- NameEnum = typing.Union[
-     typing.Literal["workflow_executions", "prompt_executions", "workflow_runtime_seconds"], typing.Any
- ]
vellum/client/types/organization_limit_config.py DELETED
@@ -1,25 +0,0 @@
- # This file was auto-generated by Fern from our API Definition.
-
- from ..core.pydantic_utilities import UniversalBaseModel
- import typing
- from .vembda_service_tier_enum import VembdaServiceTierEnum
- from .quota import Quota
- from ..core.pydantic_utilities import IS_PYDANTIC_V2
- import pydantic
-
-
- class OrganizationLimitConfig(UniversalBaseModel):
-     vembda_service_tier: typing.Optional[VembdaServiceTierEnum] = None
-     prompt_executions_quota: typing.Optional[Quota] = None
-     workflow_executions_quota: typing.Optional[Quota] = None
-     workflow_runtime_seconds_quota: typing.Optional[Quota] = None
-     max_workflow_runtime_seconds: typing.Optional[int] = None
-
-     if IS_PYDANTIC_V2:
-         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-     else:
-
-         class Config:
-             frozen = True
-             smart_union = True
-             extra = pydantic.Extra.allow
vellum/client/types/quota.py DELETED
@@ -1,22 +0,0 @@
- # This file was auto-generated by Fern from our API Definition.
-
- from ..core.pydantic_utilities import UniversalBaseModel
- from .name_enum import NameEnum
- import typing
- from ..core.pydantic_utilities import IS_PYDANTIC_V2
- import pydantic
-
-
- class Quota(UniversalBaseModel):
-     name: NameEnum
-     value: typing.Optional[int] = None
-     period_seconds: typing.Optional[int] = None
-
-     if IS_PYDANTIC_V2:
-         model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
-     else:
-
-         class Config:
-             frozen = True
-             smart_union = True
-             extra = pydantic.Extra.allow
vellum/client/types/vembda_service_tier_enum.py DELETED
@@ -1,5 +0,0 @@
- # This file was auto-generated by Fern from our API Definition.
-
- import typing
-
- VembdaServiceTierEnum = typing.Union[typing.Literal["FREE", "PAID"], typing.Any]
vellum/types/name_enum.py DELETED
@@ -1,3 +0,0 @@
- # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.
-
- from vellum.client.types.name_enum import *
vellum/types/organization_limit_config.py DELETED
@@ -1,3 +0,0 @@
- # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.
-
- from vellum.client.types.organization_limit_config import *
vellum/types/quota.py DELETED
@@ -1,3 +0,0 @@
- # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.
-
- from vellum.client.types.quota import *
vellum/types/vembda_service_tier_enum.py DELETED
@@ -1,3 +0,0 @@
- # WARNING: This file will be removed in a future release. Please import from "vellum.client" instead.
-
- from vellum.client.types.vembda_service_tier_enum import *
vellum/workflows/nodes/displayable/tool_calling_node/composio_service.py DELETED
@@ -1,83 +0,0 @@
- from dataclasses import dataclass
- from typing import Any, Dict, List
-
- from composio import Action, Composio
- from composio_client import Composio as ComposioClient
-
-
- @dataclass
- class ConnectionInfo:
-     """Information about a user's authorized connection"""
-
-     connection_id: str
-     integration_name: str
-     created_at: str
-     updated_at: str
-     status: str = "ACTIVE"  # TODO: Use enum if we end up supporting integrations that the user has not yet connected to
-
-
- class ComposioAccountService:
-     """Manages user authorized connections using composio-client"""
-
-     def __init__(self, api_key: str):
-         self.client = ComposioClient(api_key=api_key)
-
-     def get_user_connections(self) -> List[ConnectionInfo]:
-         """Get all authorized connections for the user"""
-         response = self.client.connected_accounts.list()
-
-         return [
-             ConnectionInfo(
-                 connection_id=item.id,
-                 integration_name=item.toolkit.slug,
-                 status=item.status,
-                 created_at=item.created_at,
-                 updated_at=item.updated_at,
-             )
-             for item in response.items
-         ]
-
-
- class ComposioCoreService:
-     """Handles tool execution using composio-core"""
-
-     def __init__(self, api_key: str):
-         self.client = Composio(api_key=api_key)
-
-     def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
-         """Execute a tool using composio-core
-
-         Args:
-             tool_name: The name of the tool to execute (e.g., "HACKERNEWS_GET_USER")
-             arguments: Dictionary of arguments to pass to the tool
-
-         Returns:
-             The result of the tool execution
-         """
-         # Convert tool name string to Action enum
-         action = getattr(Action, tool_name)
-         return self.client.actions.execute(action, params=arguments)
-
-
- class ComposioService:
-     """Unified interface for Composio operations"""
-
-     def __init__(self, api_key: str):
-         self.accounts = ComposioAccountService(api_key)
-         self.core = ComposioCoreService(api_key)
-
-     def get_user_connections(self) -> List[ConnectionInfo]:
-         """Get user's authorized connections"""
-         return self.accounts.get_user_connections()
-
-     def execute_tool(self, tool_name: str, arguments: Dict[str, Any]) -> Any:
-         """Execute a tool using composio-core
-
-         Args:
-             tool_name: The name of the tool to execute (e.g., "HACKERNEWS_GET_USER")
-             arguments: Dictionary of arguments to pass to the tool
-
-         Returns:
-             The result of the tool execution
-         """
-         return self.core.execute_tool(tool_name, arguments)
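For reference, a caller of this removed module would have used it roughly as in the sketch below, based only on the deleted code above. The replacement now lives in vellum/workflows/integrations/composio_service.py, whose interface is not shown in this diff; the API key and tool arguments here are placeholders.

# Sketch of how the deleted ComposioService was used (pre-1.0.6 install assumed,
# since this import path no longer exists in 1.0.6).
from vellum.workflows.nodes.displayable.tool_calling_node.composio_service import ComposioService

service = ComposioService(api_key="<composio-api-key>")  # placeholder key

# List the user's authorized connections (ConnectionInfo dataclasses).
for connection in service.get_user_connections():
    print(connection.integration_name, connection.status)

# Execute a tool by its Composio action name with a dict of arguments (placeholder values).
result = service.execute_tool("HACKERNEWS_GET_USER", {"username": "pg"})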