vellum-ai 1.4.1__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the public registry.
Files changed (64)
  1. vellum/__init__.py +14 -0
  2. vellum/client/__init__.py +3 -0
  3. vellum/client/core/client_wrapper.py +2 -2
  4. vellum/client/reference.md +160 -0
  5. vellum/client/resources/__init__.py +2 -0
  6. vellum/client/resources/integrations/__init__.py +4 -0
  7. vellum/client/resources/integrations/client.py +260 -0
  8. vellum/client/resources/integrations/raw_client.py +267 -0
  9. vellum/client/types/__init__.py +12 -0
  10. vellum/client/types/components_schemas_composio_execute_tool_request.py +5 -0
  11. vellum/client/types/components_schemas_composio_execute_tool_response.py +5 -0
  12. vellum/client/types/components_schemas_composio_tool_definition.py +5 -0
  13. vellum/client/types/composio_execute_tool_request.py +24 -0
  14. vellum/client/types/composio_execute_tool_response.py +24 -0
  15. vellum/client/types/composio_tool_definition.py +26 -0
  16. vellum/client/types/vellum_error_code_enum.py +2 -0
  17. vellum/client/types/vellum_sdk_error.py +1 -0
  18. vellum/client/types/workflow_event_error.py +1 -0
  19. vellum/resources/integrations/__init__.py +3 -0
  20. vellum/resources/integrations/client.py +3 -0
  21. vellum/resources/integrations/raw_client.py +3 -0
  22. vellum/types/components_schemas_composio_execute_tool_request.py +3 -0
  23. vellum/types/components_schemas_composio_execute_tool_response.py +3 -0
  24. vellum/types/components_schemas_composio_tool_definition.py +3 -0
  25. vellum/types/composio_execute_tool_request.py +3 -0
  26. vellum/types/composio_execute_tool_response.py +3 -0
  27. vellum/types/composio_tool_definition.py +3 -0
  28. vellum/workflows/constants.py +4 -0
  29. vellum/workflows/emitters/base.py +8 -0
  30. vellum/workflows/emitters/vellum_emitter.py +10 -0
  31. vellum/workflows/inputs/dataset_row.py +2 -2
  32. vellum/workflows/nodes/bases/base.py +12 -1
  33. vellum/workflows/nodes/displayable/bases/base_prompt_node/node.py +6 -0
  34. vellum/workflows/nodes/displayable/bases/inline_prompt_node/node.py +16 -2
  35. vellum/workflows/nodes/displayable/final_output_node/node.py +59 -0
  36. vellum/workflows/nodes/displayable/final_output_node/tests/test_node.py +40 -1
  37. vellum/workflows/nodes/displayable/tool_calling_node/node.py +3 -0
  38. vellum/workflows/nodes/displayable/tool_calling_node/tests/test_utils.py +64 -0
  39. vellum/workflows/nodes/displayable/tool_calling_node/utils.py +30 -41
  40. vellum/workflows/runner/runner.py +132 -110
  41. vellum/workflows/tests/test_dataset_row.py +29 -0
  42. vellum/workflows/types/core.py +13 -2
  43. vellum/workflows/types/definition.py +13 -1
  44. vellum/workflows/utils/functions.py +69 -27
  45. vellum/workflows/utils/tests/test_functions.py +50 -6
  46. vellum/workflows/vellum_client.py +7 -1
  47. vellum/workflows/workflows/base.py +26 -4
  48. vellum/workflows/workflows/tests/test_base_workflow.py +54 -0
  49. {vellum_ai-1.4.1.dist-info → vellum_ai-1.5.0.dist-info}/METADATA +1 -1
  50. {vellum_ai-1.4.1.dist-info → vellum_ai-1.5.0.dist-info}/RECORD +63 -42
  51. vellum_ai-1.5.0.dist-info/entry_points.txt +4 -0
  52. vellum_cli/tests/test_pull.py +1 -0
  53. vellum_cli/tests/test_push.py +2 -0
  54. vellum_ee/assets/node-definitions.json +483 -0
  55. vellum_ee/scripts/generate_node_definitions.py +89 -0
  56. vellum_ee/workflows/display/nodes/vellum/inline_prompt_node.py +1 -3
  57. vellum_ee/workflows/display/nodes/vellum/tests/test_final_output_node.py +78 -0
  58. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_inline_workflow_serialization.py +5 -0
  59. vellum_ee/workflows/display/tests/workflow_serialization/test_basic_tool_calling_node_serialization.py +5 -0
  60. vellum_ee/workflows/display/types.py +3 -0
  61. vellum_ee/workflows/display/workflows/base_workflow_display.py +6 -0
  62. vellum_ai-1.4.1.dist-info/entry_points.txt +0 -3
  63. {vellum_ai-1.4.1.dist-info → vellum_ai-1.5.0.dist-info}/LICENSE +0 -0
  64. {vellum_ai-1.4.1.dist-info → vellum_ai-1.5.0.dist-info}/WHEEL +0 -0
vellum/workflows/runner/runner.py
@@ -42,6 +42,7 @@ from vellum.workflows.events import (
     WorkflowExecutionStreamingEvent,
 )
 from vellum.workflows.events.node import (
+    NodeEvent,
     NodeExecutionFulfilledBody,
     NodeExecutionInitiatedBody,
     NodeExecutionRejectedBody,
@@ -212,6 +213,10 @@ class WorkflowRunner(Generic[StateType]):
             descriptor for descriptor in self.workflow.Outputs if isinstance(descriptor.instance, StateValueReference)
         ]
 
+        self._background_thread: Optional[Thread] = None
+        self._cancel_thread: Optional[Thread] = None
+        self._stream_thread: Optional[Thread] = None
+
     def _snapshot_state(self, state: StateType, deltas: List[StateDelta]) -> StateType:
         self._workflow_event_inner_queue.put(
             WorkflowExecutionSnapshottedEvent(
@@ -259,17 +264,36 @@ class WorkflowRunner(Generic[StateType]):
         return event
 
     def _run_work_item(self, node: BaseNode[StateType], span_id: UUID) -> None:
+        for event in self.run_node(node, span_id):
+            self._workflow_event_inner_queue.put(event)
+
+    def run_node(
+        self,
+        node: "BaseNode[StateType]",
+        span_id: UUID,
+    ) -> Generator[NodeEvent, None, None]:
+        """
+        Execute a single node and yield workflow events.
+
+        Args:
+            node: The node instance to execute
+            span_id: Unique identifier for this node execution
+
+        Yields:
+            NodeExecutionEvent: Events emitted during node execution (initiated, streaming, fulfilled, rejected)
+        """
         execution = get_execution_context()
-        self._workflow_event_inner_queue.put(
-            NodeExecutionInitiatedEvent(
-                trace_id=execution.trace_id,
-                span_id=span_id,
-                body=NodeExecutionInitiatedBody(
-                    node_definition=node.__class__,
-                    inputs=node._inputs,
-                ),
-                parent=execution.parent_context,
-            )
+
+        node_output_mocks_map = self.workflow.context.node_output_mocks_map
+
+        yield NodeExecutionInitiatedEvent(
+            trace_id=execution.trace_id,
+            span_id=span_id,
+            body=NodeExecutionInitiatedBody(
+                node_definition=node.__class__,
+                inputs=node._inputs,
+            ),
+            parent=execution.parent_context,
         )
 
         logger.debug(f"Started running node: {node.__class__.__name__}")
@@ -282,7 +306,7 @@ class WorkflowRunner(Generic[StateType]):
             )
             node_run_response: NodeRunResponse
             was_mocked: Optional[bool] = None
-            mock_candidates = self.workflow.context.node_output_mocks_map.get(node.Outputs) or []
+            mock_candidates = node_output_mocks_map.get(node.Outputs) or []
             for mock_candidate in mock_candidates:
                 if mock_candidate.when_condition.resolve(node.state):
                     node_run_response = mock_candidate.then_outputs
@@ -312,8 +336,9 @@ class WorkflowRunner(Generic[StateType]):
             streaming_output_queues: Dict[str, Queue] = {}
             outputs = node.Outputs()
 
-            def initiate_node_streaming_output(output: BaseOutput) -> None:
-                execution = get_execution_context()
+            def initiate_node_streaming_output(
+                output: BaseOutput,
+            ) -> Generator[NodeExecutionStreamingEvent, None, None]:
                 streaming_output_queues[output.name] = Queue()
                 output_descriptor = OutputReference(
                     name=output.name,
@@ -325,57 +350,51 @@ class WorkflowRunner(Generic[StateType]):
                 node.state.meta.node_outputs[output_descriptor] = streaming_output_queues[output.name]
                 initiated_output: BaseOutput = BaseOutput(name=output.name)
                 initiated_ports = initiated_output > ports
-                self._workflow_event_inner_queue.put(
-                    NodeExecutionStreamingEvent(
-                        trace_id=execution.trace_id,
-                        span_id=span_id,
-                        body=NodeExecutionStreamingBody(
-                            node_definition=node.__class__,
-                            output=initiated_output,
-                            invoked_ports=initiated_ports,
-                        ),
-                        parent=execution.parent_context,
-                    )
-                )
+                yield NodeExecutionStreamingEvent(
+                    trace_id=execution.trace_id,
+                    span_id=span_id,
+                    body=NodeExecutionStreamingBody(
+                        node_definition=node.__class__,
+                        output=initiated_output,
+                        invoked_ports=initiated_ports,
+                    ),
+                    parent=execution.parent_context,
+                )
 
             with execution_context(parent_context=updated_parent_context, trace_id=execution.trace_id):
                 for output in node_run_response:
                     invoked_ports = output > ports
                     if output.is_initiated:
-                        initiate_node_streaming_output(output)
+                        yield from initiate_node_streaming_output(output)
                     elif output.is_streaming:
                         if output.name not in streaming_output_queues:
-                            initiate_node_streaming_output(output)
+                            yield from initiate_node_streaming_output(output)
 
                         streaming_output_queues[output.name].put(output.delta)
-                        self._workflow_event_inner_queue.put(
-                            NodeExecutionStreamingEvent(
-                                trace_id=execution.trace_id,
-                                span_id=span_id,
-                                body=NodeExecutionStreamingBody(
-                                    node_definition=node.__class__,
-                                    output=output,
-                                    invoked_ports=invoked_ports,
-                                ),
-                                parent=execution.parent_context,
-                            )
-                        )
+                        yield NodeExecutionStreamingEvent(
+                            trace_id=execution.trace_id,
+                            span_id=span_id,
+                            body=NodeExecutionStreamingBody(
+                                node_definition=node.__class__,
+                                output=output,
+                                invoked_ports=invoked_ports,
+                            ),
+                            parent=execution.parent_context,
+                        )
                     elif output.is_fulfilled:
                         if output.name in streaming_output_queues:
                             streaming_output_queues[output.name].put(undefined)
 
                         setattr(outputs, output.name, output.value)
-                        self._workflow_event_inner_queue.put(
-                            NodeExecutionStreamingEvent(
-                                trace_id=execution.trace_id,
-                                span_id=span_id,
-                                body=NodeExecutionStreamingBody(
-                                    node_definition=node.__class__,
-                                    output=output,
-                                    invoked_ports=invoked_ports,
-                                ),
-                                parent=execution.parent_context,
-                            )
-                        )
+                        yield NodeExecutionStreamingEvent(
+                            trace_id=execution.trace_id,
+                            span_id=span_id,
+                            body=NodeExecutionStreamingBody(
+                                node_definition=node.__class__,
+                                output=output,
+                                invoked_ports=invoked_ports,
+                            ),
+                            parent=execution.parent_context,
+                        )
 
             node.state.meta.node_execution_cache.fulfill_node_execution(node.__class__, span_id)
@@ -390,66 +409,57 @@ class WorkflowRunner(Generic[StateType]):
                 node.state.meta.node_outputs[descriptor] = output_value
 
             invoked_ports = ports(outputs, node.state)
-            self._workflow_event_inner_queue.put(
-                NodeExecutionFulfilledEvent(
-                    trace_id=execution.trace_id,
-                    span_id=span_id,
-                    body=NodeExecutionFulfilledBody(
-                        node_definition=node.__class__,
-                        outputs=outputs,
-                        invoked_ports=invoked_ports,
-                        mocked=was_mocked,
-                    ),
-                    parent=execution.parent_context,
-                )
+            yield NodeExecutionFulfilledEvent(
+                trace_id=execution.trace_id,
+                span_id=span_id,
+                body=NodeExecutionFulfilledBody(
+                    node_definition=node.__class__,
+                    outputs=outputs,
+                    invoked_ports=invoked_ports,
+                    mocked=was_mocked,
+                ),
+                parent=execution.parent_context,
             )
         except NodeException as e:
            logger.info(e)
            captured_stacktrace = traceback.format_exc()
 
-            self._workflow_event_inner_queue.put(
-                NodeExecutionRejectedEvent(
-                    trace_id=execution.trace_id,
-                    span_id=span_id,
-                    body=NodeExecutionRejectedBody(
-                        node_definition=node.__class__,
-                        error=e.error,
-                        stacktrace=captured_stacktrace,
-                    ),
-                    parent=execution.parent_context,
-                )
+            yield NodeExecutionRejectedEvent(
+                trace_id=execution.trace_id,
+                span_id=span_id,
+                body=NodeExecutionRejectedBody(
+                    node_definition=node.__class__,
+                    error=e.error,
+                    stacktrace=captured_stacktrace,
+                ),
+                parent=execution.parent_context,
             )
         except WorkflowInitializationException as e:
             logger.info(e)
             captured_stacktrace = traceback.format_exc()
-            self._workflow_event_inner_queue.put(
-                NodeExecutionRejectedEvent(
-                    trace_id=execution.trace_id,
-                    span_id=span_id,
-                    body=NodeExecutionRejectedBody(
-                        node_definition=node.__class__,
-                        error=e.error,
-                        stacktrace=captured_stacktrace,
-                    ),
-                    parent=execution.parent_context,
-                )
+            yield NodeExecutionRejectedEvent(
+                trace_id=execution.trace_id,
+                span_id=span_id,
+                body=NodeExecutionRejectedBody(
+                    node_definition=node.__class__,
+                    error=e.error,
+                    stacktrace=captured_stacktrace,
+                ),
+                parent=execution.parent_context,
             )
         except InvalidExpressionException as e:
             logger.info(e)
             captured_stacktrace = traceback.format_exc()
-            self._workflow_event_inner_queue.put(
-                NodeExecutionRejectedEvent(
-                    trace_id=execution.trace_id,
-                    span_id=span_id,
-                    body=NodeExecutionRejectedBody(
-                        node_definition=node.__class__,
-                        error=e.error,
-                        stacktrace=captured_stacktrace,
-                    ),
-                    parent=execution.parent_context,
-                )
+            yield NodeExecutionRejectedEvent(
+                trace_id=execution.trace_id,
+                span_id=span_id,
+                body=NodeExecutionRejectedBody(
+                    node_definition=node.__class__,
+                    error=e.error,
+                    stacktrace=captured_stacktrace,
+                ),
+                parent=execution.parent_context,
            )
-
         except Exception as e:
             error_message = self._parse_error_message(e)
             if error_message is None:
@@ -459,19 +469,17 @@ class WorkflowRunner(Generic[StateType]):
             else:
                 error_code = WorkflowErrorCode.NODE_EXECUTION
 
-            self._workflow_event_inner_queue.put(
-                NodeExecutionRejectedEvent(
-                    trace_id=execution.trace_id,
-                    span_id=span_id,
-                    body=NodeExecutionRejectedBody(
-                        node_definition=node.__class__,
-                        error=WorkflowError(
-                            message=error_message,
-                            code=error_code,
-                        ),
-                    ),
-                    parent=execution.parent_context,
-                ),
-            )
+            yield NodeExecutionRejectedEvent(
+                trace_id=execution.trace_id,
+                span_id=span_id,
+                body=NodeExecutionRejectedBody(
+                    node_definition=node.__class__,
+                    error=WorkflowError(
+                        message=error_message,
+                        code=error_code,
+                    ),
+                ),
+                parent=execution.parent_context,
+            )
 
         logger.debug(f"Finished running node: {node.__class__.__name__}")
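Note: the net effect of the hunks above is that `run_node` becomes a public generator seam — `_run_work_item` now just drains it into the inner queue, so other callers (tests, subworkflow drivers) can consume node-level events directly. A minimal sketch of that usage, assuming a runner and node instance are already in hand (the helper name is illustrative, not from this diff):

```python
from typing import List
from uuid import uuid4


def collect_node_events(runner, node) -> List:
    # run_node yields NodeEvent objects (initiated, streaming, fulfilled,
    # rejected) back to the caller instead of pushing them onto the
    # runner's _workflow_event_inner_queue.
    return list(runner.run_node(node, span_id=uuid4()))
```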
@@ -911,20 +919,20 @@ class WorkflowRunner(Generic[StateType]):
         return False
 
     def _generate_events(self) -> Generator[WorkflowEvent, None, None]:
-        background_thread = Thread(
+        self._background_thread = Thread(
             target=self._run_background_thread,
             name=f"{self.workflow.__class__.__name__}.background_thread",
         )
-        background_thread.start()
+        self._background_thread.start()
 
         cancel_thread_kill_switch = ThreadingEvent()
         if self._cancel_signal:
-            cancel_thread = Thread(
+            self._cancel_thread = Thread(
                 target=self._run_cancel_thread,
                 name=f"{self.workflow.__class__.__name__}.cancel_thread",
                 kwargs={"kill_switch": cancel_thread_kill_switch},
             )
-            cancel_thread.start()
+            self._cancel_thread.start()
 
         event: WorkflowEvent
         if self._is_resuming:
@@ -935,13 +943,13 @@ class WorkflowRunner(Generic[StateType]):
             yield self._emit_event(event)
 
         # The extra level of indirection prevents the runner from waiting on the caller to consume the event stream
-        stream_thread = Thread(
+        self._stream_thread = Thread(
             target=self._stream,
             name=f"{self.workflow.__class__.__name__}.stream_thread",
         )
-        stream_thread.start()
+        self._stream_thread.start()
 
-        while stream_thread.is_alive():
+        while self._stream_thread.is_alive():
             try:
                 event = self._workflow_event_outer_queue.get(timeout=0.1)
             except Empty:
@@ -971,3 +979,17 @@ class WorkflowRunner(Generic[StateType]):
 
     def stream(self) -> WorkflowEventStream:
         return WorkflowEventGenerator(self._generate_events(), self._initial_state.meta.span_id)
+
+    def join(self) -> None:
+        """
+        Wait for all background threads to complete.
+        This ensures all pending work is finished before the runner terminates.
+        """
+        if self._stream_thread and self._stream_thread.is_alive():
+            self._stream_thread.join()
+
+        if self._background_thread and self._background_thread.is_alive():
+            self._background_thread.join()
+
+        if self._cancel_thread and self._cancel_thread.is_alive():
+            self._cancel_thread.join()
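Because the three worker threads are now stored on the instance, a caller can wait for them explicitly. A rough consumer sketch, assuming the usual BaseWorkflow subclassing pattern — the workflow and node classes below are invented for illustration, and the WorkflowRunner constructor may accept additional arguments:

```python
from vellum.workflows import BaseWorkflow
from vellum.workflows.nodes.bases import BaseNode
from vellum.workflows.runner.runner import WorkflowRunner


class GreetNode(BaseNode):
    class Outputs(BaseNode.Outputs):
        greeting: str

    def run(self) -> Outputs:
        return self.Outputs(greeting="hello")


class GreetWorkflow(BaseWorkflow):
    graph = GreetNode


runner = WorkflowRunner(GreetWorkflow())

# Events are produced on the runner's stream/background/cancel threads.
for event in runner.stream():
    print(event.name)

# New in 1.5.0: block until those threads finish, so pending work
# (e.g. emitter flushes) is not dropped at shutdown.
runner.join()
```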
vellum/workflows/tests/test_dataset_row.py
@@ -97,3 +97,32 @@ def test_dataset_row_with_default_inputs():
     assert serialized_dict["label"] == "defaults_test"
     assert serialized_dict["inputs"]["required_field"] == "required_value"
     assert serialized_dict["inputs"]["optional_with_default"] == "default_value"
+
+
+def test_dataset_row_without_inputs():
+    """
+    Test that DatasetRow can be created with only a label (no inputs parameter).
+    """
+
+    dataset_row = DatasetRow(label="test_label_only")
+
+    serialized_dict = dataset_row.model_dump()
+
+    assert serialized_dict["label"] == "test_label_only"
+    assert serialized_dict["inputs"] == {}
+
+    assert isinstance(dataset_row.inputs, BaseInputs)
+
+
+def test_dataset_row_with_empty_inputs():
+    """
+    Test that DatasetRow can be created with explicitly empty BaseInputs.
+    """
+
+    # GIVEN a DatasetRow with explicitly empty BaseInputs
+    dataset_row = DatasetRow(label="test_label", inputs=BaseInputs())
+
+    serialized_dict = dataset_row.model_dump()
+
+    assert serialized_dict["label"] == "test_label"
+    assert serialized_dict["inputs"] == {}
vellum/workflows/types/core.py
@@ -13,7 +13,12 @@ from typing import (  # type: ignore[attr-defined]
 )
 
 from vellum.client.core.pydantic_utilities import UniversalBaseModel
-from vellum.workflows.types.definition import ComposioToolDefinition, DeploymentDefinition, MCPServer
+from vellum.workflows.types.definition import (
+    ComposioToolDefinition,
+    DeploymentDefinition,
+    MCPServer,
+    VellumIntegrationToolDefinition,
+)
 
 if TYPE_CHECKING:
     from vellum.workflows.workflows.base import BaseWorkflow
@@ -51,5 +56,11 @@ class ConditionType(Enum):
 
 
 # Type alias for functions that can be called in tool calling nodes
-ToolBase = Union[Callable[..., Any], DeploymentDefinition, Type["BaseWorkflow"], ComposioToolDefinition]
+ToolBase = Union[
+    Callable[..., Any],
+    DeploymentDefinition,
+    Type["BaseWorkflow"],
+    ComposioToolDefinition,
+    VellumIntegrationToolDefinition,
+]
 Tool = Union[ToolBase, MCPServer]
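With VellumIntegrationToolDefinition added to ToolBase, a single tool list can now mix every flavor the union accepts. A small illustration — the callable and deployment names are made up, while DeploymentDefinition's deployment/release_tag fields are the ones this release's functions.py changes rely on:

```python
from typing import List

from vellum.workflows.types.core import Tool
from vellum.workflows.types.definition import DeploymentDefinition


def search_github(query: str) -> str:
    """A plain Python callable remains a valid Tool."""
    return f"results for {query}"


tools: List[Tool] = [
    search_github,
    DeploymentDefinition(deployment="my-workflow-deployment", release_tag="LATEST"),
]
```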
vellum/workflows/types/definition.py
@@ -10,7 +10,7 @@ from vellum import Vellum
 from vellum.client.core.pydantic_utilities import UniversalBaseModel
 from vellum.client.types.code_resource_definition import CodeResourceDefinition as ClientCodeResourceDefinition
 from vellum.client.types.vellum_variable import VellumVariable
-from vellum.workflows.constants import AuthorizationType
+from vellum.workflows.constants import AuthorizationType, VellumIntegrationProviderType
 from vellum.workflows.references.environment_variable import EnvironmentVariableReference
 
 
@@ -165,6 +165,18 @@ class ComposioToolDefinition(UniversalBaseModel):
         self.name = self.action.lower()
 
 
+class VellumIntegrationToolDefinition(UniversalBaseModel):
+    type: Literal["INTEGRATION"] = "INTEGRATION"
+
+    # Core identification
+    provider: VellumIntegrationProviderType
+    integration: str  # "GITHUB", "SLACK", etc.
+    name: str  # Specific action like "GITHUB_CREATE_AN_ISSUE"
+
+    # Required for tool base consistency
+    description: str
+
+
 class MCPServer(UniversalBaseModel):
     type: Literal["MCP_SERVER"] = "MCP_SERVER"
     name: str
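For reference, constructing the new model looks roughly like this. The diff adds VellumIntegrationProviderType to constants.py (+4 lines) without showing its members, so the COMPOSIO member below is an assumption:

```python
from vellum.workflows.constants import VellumIntegrationProviderType
from vellum.workflows.types.definition import VellumIntegrationToolDefinition

tool = VellumIntegrationToolDefinition(
    provider=VellumIntegrationProviderType.COMPOSIO,  # assumed enum member
    integration="GITHUB",
    name="GITHUB_CREATE_AN_ISSUE",
    description="Create an issue in a GitHub repository.",
)

assert tool.type == "INTEGRATION"  # discriminator defaults from the Literal
```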
vellum/workflows/utils/functions.py
@@ -1,19 +1,6 @@
 import dataclasses
 import inspect
-from typing import (
-    TYPE_CHECKING,
-    Annotated,
-    Any,
-    Callable,
-    Dict,
-    List,
-    Literal,
-    Optional,
-    Type,
-    Union,
-    get_args,
-    get_origin,
-)
+from typing import TYPE_CHECKING, Annotated, Any, Callable, List, Literal, Optional, Type, Union, get_args, get_origin
 
 from pydantic import BaseModel
 from pydantic_core import PydanticUndefined
@@ -21,14 +8,21 @@ from pydash import snake_case
 
 from vellum import Vellum
 from vellum.client.types.function_definition import FunctionDefinition
+from vellum.workflows.integrations.composio_service import ComposioService
 from vellum.workflows.integrations.mcp_service import MCPService
-from vellum.workflows.types.definition import MCPServer, MCPToolDefinition
+from vellum.workflows.types.definition import (
+    ComposioToolDefinition,
+    DeploymentDefinition,
+    MCPServer,
+    MCPToolDefinition,
+    VellumIntegrationToolDefinition,
+)
 from vellum.workflows.utils.vellum_variables import vellum_variable_type_to_openapi_type
 
 if TYPE_CHECKING:
     from vellum.workflows.workflows.base import BaseWorkflow
 
-type_map = {
+type_map: dict[Any, str] = {
     str: "string",
     int: "integer",
     float: "number",
@@ -38,8 +32,13 @@ type_map = {
     None: "null",
     type(None): "null",
     inspect._empty: "null",
+    "None": "null",
 }
 
+for k, v in list(type_map.items()):
+    if isinstance(k, type):
+        type_map[k.__name__] = v
+
 
 def compile_annotation(annotation: Optional[Any], defs: dict[str, Any]) -> dict:
     if annotation is None:
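The new loop mirrors every type key under its `__name__`, so lookups succeed when an annotation arrives as the string "int" rather than the type int (as can happen under postponed annotation evaluation). A standalone illustration of the same trick, with `sketch_map` standing in for functions.py's type_map:

```python
# Self-contained sketch of the __name__-aliasing pattern used above.
sketch_map: dict = {str: "string", int: "integer", type(None): "null", "None": "null"}

for key, json_type in list(sketch_map.items()):
    if isinstance(key, type):
        sketch_map[key.__name__] = json_type

assert sketch_map[int] == "integer"    # lookup by type still works
assert sketch_map["int"] == "integer"  # ...and now by type name too
```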
@@ -238,25 +237,21 @@ def compile_inline_workflow_function_definition(workflow_class: Type["BaseWorkflow"]
 
 
 def compile_workflow_deployment_function_definition(
-    deployment_config: Dict[str, str],
+    deployment_definition: DeploymentDefinition,
     vellum_client: Vellum,
 ) -> FunctionDefinition:
     """
     Converts a deployment workflow config into our Vellum-native FunctionDefinition type.
 
     Args:
-        deployment_config: Dict with 'deployment' and 'release_tag' keys
+        deployment_definition: DeploymentDefinition instance
         vellum_client: Vellum client instance
     """
-    deployment = deployment_config["deployment"]
-    release_tag = deployment_config["release_tag"]
+    release_info = deployment_definition.get_release_info(vellum_client)
 
-    workflow_deployment_release = vellum_client.workflow_deployments.retrieve_workflow_deployment_release(
-        deployment, release_tag
-    )
-
-    input_variables = workflow_deployment_release.workflow_version.input_variables
-    description = workflow_deployment_release.description
+    name = release_info["name"]
+    description = release_info["description"]
+    input_variables = release_info["input_variables"]
 
     properties = {}
     required = []
@@ -270,7 +265,7 @@ def compile_workflow_deployment_function_definition(
     parameters = {"type": "object", "properties": properties, "required": required}
 
     return FunctionDefinition(
-        name=deployment.replace("-", ""),
+        name=name.replace("-", ""),
         description=description,
        parameters=parameters,
     )
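Call sites accordingly move from a raw dict to the typed model. Usage would look something like the following, where get_release_info (introduced in this release's definition.py changes) is assumed to return name/description/input_variables keys, matching how the new body reads them:

```python
from vellum import Vellum
from vellum.workflows.types.definition import DeploymentDefinition
from vellum.workflows.utils.functions import compile_workflow_deployment_function_definition

client = Vellum(api_key="...")  # placeholder credentials

fn = compile_workflow_deployment_function_definition(
    DeploymentDefinition(deployment="my-workflow-deployment", release_tag="LATEST"),
    vellum_client=client,
)
print(fn.name, fn.parameters)
```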
@@ -300,6 +295,53 @@ def compile_mcp_tool_definition(server_def: MCPServer) -> List[MCPToolDefinition]:
         return []
 
 
+def compile_composio_tool_definition(tool_def: ComposioToolDefinition) -> FunctionDefinition:
+    """Hydrate a ComposioToolDefinition with detailed information from the Composio API.
+
+    Args:
+        tool_def: The basic ComposioToolDefinition to enhance
+
+    Returns:
+        FunctionDefinition with detailed parameters and description
+    """
+    try:
+        composio_service = ComposioService()
+        tool_details = composio_service.get_tool_by_slug(tool_def.action)
+
+        # Create a FunctionDefinition directly with proper field extraction
+        return FunctionDefinition(
+            name=tool_def.name,
+            description=tool_details.get("description", tool_def.description),
+            parameters=tool_details.get("input_parameters", {}),
+        )
+    except Exception:
+        # If hydration fails (including no API key), return basic function definition
+        return FunctionDefinition(
+            name=tool_def.name,
+            description=tool_def.description,
+            parameters={},
+        )
+
+
+def compile_vellum_integration_tool_definition(tool_def: VellumIntegrationToolDefinition) -> FunctionDefinition:
+    """Compile a VellumIntegrationToolDefinition into a FunctionDefinition.
+
+    TODO: Implement when VellumIntegrationService is created.
+
+    Args:
+        tool_def: The VellumIntegrationToolDefinition to compile
+
+    Returns:
+        FunctionDefinition with tool parameters and description
+    """
+    # TODO: Implement when VellumIntegrationService is available
+    # This will eventually use VellumIntegrationService to fetch tool details
+    raise NotImplementedError(
+        "VellumIntegrationToolDefinition compilation coming soon. "
+        "This will be implemented when the VellumIntegrationService is created."
+    )
+
+
 def use_tool_inputs(**inputs):
     """
     Decorator to specify which parameters of a tool function should be provided
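One behavioral note on compile_composio_tool_definition worth a sketch: hydration failure, including a missing Composio API key, degrades to an unparameterized definition rather than raising, so serialization paths stay usable offline. Roughly — the toolkit field name on ComposioToolDefinition is an assumption, and name defaults to action.lower() per the hunk above:

```python
from vellum.workflows.types.definition import ComposioToolDefinition
from vellum.workflows.utils.functions import compile_composio_tool_definition

tool = ComposioToolDefinition(
    toolkit="GITHUB",  # assumed field name
    action="GITHUB_CREATE_AN_ISSUE",
    description="Create an issue in a GitHub repository.",
)

# With no Composio credentials configured, the except branch returns a
# basic definition: name "github_create_an_issue", empty parameters.
fn = compile_composio_tool_definition(tool)
print(fn.name, fn.parameters)
```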