vellum-ai 0.2.2__py3-none-any.whl → 0.3.2__py3-none-any.whl

Files changed (46)
  1. vellum/__init__.py +42 -40
  2. vellum/client.py +6 -52
  3. vellum/core/client_wrapper.py +1 -1
  4. vellum/resources/deployments/client.py +2 -6
  5. vellum/resources/document_indexes/client.py +50 -6
  6. vellum/resources/documents/client.py +16 -0
  7. vellum/resources/registered_prompts/client.py +4 -44
  8. vellum/resources/sandboxes/client.py +20 -47
  9. vellum/types/__init__.py +47 -43
  10. vellum/types/api_node_result.py +4 -0
  11. vellum/types/code_execution_node_result.py +4 -0
  12. vellum/types/conditional_node_result.py +4 -0
  13. vellum/types/{array_variable_value.py → fulfilled_workflow_node_result_event.py} +13 -9
  14. vellum/types/initiated_workflow_node_result_event.py +39 -0
  15. vellum/types/{chat_history_variable_value.py → node_output_compiled_chat_history_value.py} +2 -1
  16. vellum/types/node_output_compiled_error_value.py +30 -0
  17. vellum/types/{sandbox_metric_input_params.py → node_output_compiled_json_value.py} +3 -3
  18. vellum/types/{number_variable_value.py → node_output_compiled_number_value.py} +2 -1
  19. vellum/types/{search_results_variable_value.py → node_output_compiled_search_results_value.py} +2 -1
  20. vellum/types/node_output_compiled_string_value.py +29 -0
  21. vellum/types/node_output_compiled_value.py +78 -0
  22. vellum/types/prompt_node_result.py +4 -0
  23. vellum/types/{workflow_output_array.py → rejected_workflow_node_result_event.py} +9 -5
  24. vellum/types/sandbox_scenario.py +0 -2
  25. vellum/types/search_node_result.py +4 -0
  26. vellum/types/{evaluation_params.py → streaming_workflow_node_result_event.py} +14 -4
  27. vellum/types/templating_node_result.py +4 -0
  28. vellum/types/terminal_node_result.py +4 -0
  29. vellum/types/workflow_execution_node_result_event.py +4 -0
  30. vellum/types/workflow_execution_workflow_result_event.py +4 -0
  31. vellum/types/workflow_node_result_event.py +43 -26
  32. vellum/types/workflow_output.py +0 -11
  33. vellum/types/workflow_result_event_output_data_chat_history.py +4 -0
  34. vellum/types/workflow_result_event_output_data_error.py +4 -0
  35. vellum/types/workflow_result_event_output_data_json.py +4 -0
  36. vellum/types/workflow_result_event_output_data_number.py +4 -0
  37. vellum/types/workflow_result_event_output_data_search_results.py +4 -0
  38. vellum/types/workflow_result_event_output_data_string.py +4 -0
  39. {vellum_ai-0.2.2.dist-info → vellum_ai-0.3.2.dist-info}/METADATA +1 -1
  40. {vellum_ai-0.2.2.dist-info → vellum_ai-0.3.2.dist-info}/RECORD +42 -42
  41. vellum/types/array_enum.py +0 -5
  42. vellum/types/evaluation_params_request.py +0 -30
  43. vellum/types/sandbox_metric_input_params_request.py +0 -29
  44. vellum/types/variable_value.py +0 -102
  45. {vellum_ai-0.2.2.dist-info → vellum_ai-0.3.2.dist-info}/LICENSE +0 -0
  46. {vellum_ai-0.2.2.dist-info → vellum_ai-0.3.2.dist-info}/WHEEL +0 -0
@@ -0,0 +1,78 @@
+ # This file was auto-generated by Fern from our API Definition.
+
+ from __future__ import annotations
+
+ import typing
+
+ import typing_extensions
+
+ from .node_output_compiled_chat_history_value import NodeOutputCompiledChatHistoryValue
+ from .node_output_compiled_error_value import NodeOutputCompiledErrorValue
+ from .node_output_compiled_json_value import NodeOutputCompiledJsonValue
+ from .node_output_compiled_number_value import NodeOutputCompiledNumberValue
+ from .node_output_compiled_search_results_value import NodeOutputCompiledSearchResultsValue
+ from .node_output_compiled_string_value import NodeOutputCompiledStringValue
+
+
+ class NodeOutputCompiledValue_String(NodeOutputCompiledStringValue):
+     type: typing_extensions.Literal["STRING"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ class NodeOutputCompiledValue_Number(NodeOutputCompiledNumberValue):
+     type: typing_extensions.Literal["NUMBER"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ class NodeOutputCompiledValue_Json(NodeOutputCompiledJsonValue):
+     type: typing_extensions.Literal["JSON"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ class NodeOutputCompiledValue_ChatHistory(NodeOutputCompiledChatHistoryValue):
+     type: typing_extensions.Literal["CHAT_HISTORY"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ class NodeOutputCompiledValue_SearchResults(NodeOutputCompiledSearchResultsValue):
+     type: typing_extensions.Literal["SEARCH_RESULTS"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ class NodeOutputCompiledValue_Error(NodeOutputCompiledErrorValue):
+     type: typing_extensions.Literal["ERROR"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ NodeOutputCompiledValue = typing.Union[
+     NodeOutputCompiledValue_String,
+     NodeOutputCompiledValue_Number,
+     NodeOutputCompiledValue_Json,
+     NodeOutputCompiledValue_ChatHistory,
+     NodeOutputCompiledValue_SearchResults,
+     NodeOutputCompiledValue_Error,
+ ]
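
The new node_output_compiled_value.py above defines a tagged union keyed on a `type` literal. A minimal sketch (not part of the diff) of how a caller might narrow it; the `value` attribute used in the STRING branch is an assumption, since the variant models' fields are not shown in this hunk:

# Minimal sketch of narrowing NodeOutputCompiledValue on its `type` literal.
from vellum.types.node_output_compiled_value import (
    NodeOutputCompiledValue,
    NodeOutputCompiledValue_Error,
    NodeOutputCompiledValue_String,
)


def describe_compiled_output(output: NodeOutputCompiledValue) -> str:
    if isinstance(output, NodeOutputCompiledValue_String):
        return f"string output: {output.value}"  # `value` is assumed, not shown above
    if isinstance(output, NodeOutputCompiledValue_Error):
        return "node output resolved to an error"
    return f"{output.type} output"  # every variant declares a `type` literal

Because each variant sets `smart_union = True` and carries a distinct literal, pydantic can pick the right model when parsing API responses.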
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class PromptNodeResult(pydantic.BaseModel):
+     """
+     A Node Result Event emitted from a Prompt Node.
+     """
+
      data: PromptNodeResultData
  
      def json(self, **kwargs: typing.Any) -> str:
@@ -4,7 +4,8 @@ import datetime as dt
  import typing
  
  from ..core.datetime_utils import serialize_datetime
- from .variable_value import VariableValue
+ from .workflow_event_error import WorkflowEventError
+ from .workflow_node_result_data import WorkflowNodeResultData
  
  try:
      import pydantic.v1 as pydantic  # type: ignore
@@ -12,14 +13,17 @@ except ImportError:
      import pydantic  # type: ignore
  
  
- class WorkflowOutputArray(pydantic.BaseModel):
+ class RejectedWorkflowNodeResultEvent(pydantic.BaseModel):
      """
-     An array of outputs from a Workflow execution.
+     An event that indicates that the node has rejected its execution.
      """
  
      id: str
-     name: str = pydantic.Field(description="The output's name, as defined in the workflow")
-     value: typing.List[VariableValue]
+     node_id: str
+     node_result_id: str
+     ts: typing.Optional[dt.datetime]
+     data: typing.Optional[WorkflowNodeResultData]
+     error: WorkflowEventError
  
      def json(self, **kwargs: typing.Any) -> str:
          kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -4,7 +4,6 @@ import datetime as dt
  import typing
  
  from ..core.datetime_utils import serialize_datetime
- from .sandbox_metric_input_params import SandboxMetricInputParams
  from .scenario_input import ScenarioInput
  
  try:
@@ -17,7 +16,6 @@ class SandboxScenario(pydantic.BaseModel):
      label: typing.Optional[str]
      inputs: typing.List[ScenarioInput] = pydantic.Field(description="The inputs for the scenario")
      id: str = pydantic.Field(description="The id of the scenario")
-     metric_input_params: SandboxMetricInputParams
  
      def json(self, **kwargs: typing.Any) -> str:
          kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class SearchNodeResult(pydantic.BaseModel):
+     """
+     A Node Result Event emitted from a Search Node.
+     """
+
      data: SearchNodeResultData
  
      def json(self, **kwargs: typing.Any) -> str:
@@ -4,6 +4,8 @@ import datetime as dt
  import typing
  
  from ..core.datetime_utils import serialize_datetime
+ from .node_output_compiled_value import NodeOutputCompiledValue
+ from .workflow_node_result_data import WorkflowNodeResultData
  
  try:
      import pydantic.v1 as pydantic  # type: ignore
@@ -11,10 +13,18 @@ except ImportError:
      import pydantic  # type: ignore
  
  
- class EvaluationParams(pydantic.BaseModel):
-     target: typing.Optional[str] = pydantic.Field(
-         description="The target value to compare the LLM output against. Typically what you expect or desire the LLM output to be."
-     )
+ class StreamingWorkflowNodeResultEvent(pydantic.BaseModel):
+     """
+     An event that indicates that the node's execution is in progress.
+     """
+
+     id: str
+     node_id: str
+     node_result_id: str
+     ts: typing.Optional[dt.datetime]
+     data: typing.Optional[WorkflowNodeResultData]
+     output: typing.Optional[NodeOutputCompiledValue]
+     output_index: typing.Optional[int]
  
      def json(self, **kwargs: typing.Any) -> str:
          kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class TemplatingNodeResult(pydantic.BaseModel):
+     """
+     A Node Result Event emitted from a Templating Node.
+     """
+
      data: TemplatingNodeResultData
  
      def json(self, **kwargs: typing.Any) -> str:
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class TerminalNodeResult(pydantic.BaseModel):
+     """
+     A Node Result Event emitted from a Terminal Node.
+     """
+
      data: TerminalNodeResultData
  
      def json(self, **kwargs: typing.Any) -> str:
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class WorkflowExecutionNodeResultEvent(pydantic.BaseModel):
+     """
+     A NODE-level event emitted from the workflow's execution.
+     """
+
      execution_id: str
      run_id: typing.Optional[str]
      external_id: typing.Optional[str]
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class WorkflowExecutionWorkflowResultEvent(pydantic.BaseModel):
+     """
+     A WORKFLOW-level event emitted from the workflow's execution.
+     """
+
      execution_id: str
      run_id: typing.Optional[str]
      external_id: typing.Optional[str]
@@ -1,39 +1,56 @@
  # This file was auto-generated by Fern from our API Definition.
  
- import datetime as dt
+ from __future__ import annotations
+
  import typing
  
- from ..core.datetime_utils import serialize_datetime
- from .node_input_variable_compiled_value import NodeInputVariableCompiledValue
- from .workflow_event_error import WorkflowEventError
- from .workflow_node_result_data import WorkflowNodeResultData
- from .workflow_node_result_event_state import WorkflowNodeResultEventState
+ import typing_extensions
+
+ from .fulfilled_workflow_node_result_event import FulfilledWorkflowNodeResultEvent
+ from .initiated_workflow_node_result_event import InitiatedWorkflowNodeResultEvent
+ from .rejected_workflow_node_result_event import RejectedWorkflowNodeResultEvent
+ from .streaming_workflow_node_result_event import StreamingWorkflowNodeResultEvent
  
- try:
-     import pydantic.v1 as pydantic  # type: ignore
- except ImportError:
-     import pydantic  # type: ignore
  
+ class WorkflowNodeResultEvent_Initiated(InitiatedWorkflowNodeResultEvent):
+     state: typing_extensions.Literal["INITIATED"]
  
- class WorkflowNodeResultEvent(pydantic.BaseModel):
-     id: str
-     node_id: str
-     node_result_id: str
-     state: WorkflowNodeResultEventState
-     ts: typing.Optional[dt.datetime]
-     data: typing.Optional[WorkflowNodeResultData]
-     error: typing.Optional[WorkflowEventError]
-     input_values: typing.Optional[typing.List[NodeInputVariableCompiledValue]]
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
  
-     def json(self, **kwargs: typing.Any) -> str:
-         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-         return super().json(**kwargs_with_defaults)
  
-     def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
-         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
-         return super().dict(**kwargs_with_defaults)
+ class WorkflowNodeResultEvent_Streaming(StreamingWorkflowNodeResultEvent):
+     state: typing_extensions.Literal["STREAMING"]
  
      class Config:
          frozen = True
          smart_union = True
-         json_encoders = {dt.datetime: serialize_datetime}
+         allow_population_by_field_name = True
+
+
+ class WorkflowNodeResultEvent_Fulfilled(FulfilledWorkflowNodeResultEvent):
+     state: typing_extensions.Literal["FULFILLED"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ class WorkflowNodeResultEvent_Rejected(RejectedWorkflowNodeResultEvent):
+     state: typing_extensions.Literal["REJECTED"]
+
+     class Config:
+         frozen = True
+         smart_union = True
+         allow_population_by_field_name = True
+
+
+ WorkflowNodeResultEvent = typing.Union[
+     WorkflowNodeResultEvent_Initiated,
+     WorkflowNodeResultEvent_Streaming,
+     WorkflowNodeResultEvent_Fulfilled,
+     WorkflowNodeResultEvent_Rejected,
+ ]
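
WorkflowNodeResultEvent is now a union discriminated by the `state` literal rather than a single model with a `state` field. A minimal sketch (not part of the diff) of consuming it, using only attributes that appear in the hunks above (node_id, error, output, state):

# Minimal sketch of branching on the reworked WorkflowNodeResultEvent union.
from vellum.types.workflow_node_result_event import (
    WorkflowNodeResultEvent,
    WorkflowNodeResultEvent_Rejected,
    WorkflowNodeResultEvent_Streaming,
)


def handle_node_event(event: WorkflowNodeResultEvent) -> None:
    if isinstance(event, WorkflowNodeResultEvent_Rejected):
        # RejectedWorkflowNodeResultEvent carries a required WorkflowEventError.
        print(f"node {event.node_id} rejected: {event.error}")
    elif isinstance(event, WorkflowNodeResultEvent_Streaming):
        # StreamingWorkflowNodeResultEvent exposes an optional compiled output chunk.
        if event.output is not None:
            print(f"node {event.node_id} streamed a {event.output.type} chunk")
    else:
        print(f"received a {event.state} node event")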
@@ -6,7 +6,6 @@ import typing
  
  import typing_extensions
  
- from .workflow_output_array import WorkflowOutputArray
  from .workflow_output_chat_history import WorkflowOutputChatHistory
  from .workflow_output_error import WorkflowOutputError
  from .workflow_output_function_call import WorkflowOutputFunctionCall
@@ -71,15 +70,6 @@ class WorkflowOutput_Error(WorkflowOutputError):
          allow_population_by_field_name = True
  
  
- class WorkflowOutput_Array(WorkflowOutputArray):
-     type: typing_extensions.Literal["ARRAY"]
-
-     class Config:
-         frozen = True
-         smart_union = True
-         allow_population_by_field_name = True
-
-
  class WorkflowOutput_FunctionCall(WorkflowOutputFunctionCall):
      type: typing_extensions.Literal["FUNCTION_CALL"]
  
@@ -105,7 +95,6 @@ WorkflowOutput = typing.Union[
      WorkflowOutput_ChatHistory,
      WorkflowOutput_SearchResults,
      WorkflowOutput_Error,
-     WorkflowOutput_Array,
      WorkflowOutput_FunctionCall,
      WorkflowOutput_Image,
  ]
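
The ARRAY variant is removed from the WorkflowOutput union, so callers that branched on type == "ARRAY" need to drop that branch. A minimal sketch (not part of the diff) using only names visible above:

# Minimal sketch: only variants that remain in 0.3.2 can match.
from vellum.types.workflow_output import WorkflowOutput, WorkflowOutput_Error


def is_error_output(output: WorkflowOutput) -> bool:
    # WorkflowOutput_Array no longer exists; an ARRAY branch would never match.
    return isinstance(output, WorkflowOutput_Error)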
@@ -14,6 +14,10 @@ except ImportError:
  
  
  class WorkflowResultEventOutputDataChatHistory(pydantic.BaseModel):
+     """
+     A Chat History output streamed from a Workflow execution.
+     """
+
      id: typing.Optional[str]
      name: str
      state: WorkflowNodeResultEventState
@@ -14,6 +14,10 @@ except ImportError:
  
  
  class WorkflowResultEventOutputDataError(pydantic.BaseModel):
+     """
+     An Error output streamed from a Workflow execution.
+     """
+
      id: typing.Optional[str]
      name: str
      state: WorkflowNodeResultEventState
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class WorkflowResultEventOutputDataJson(pydantic.BaseModel):
+     """
+     A JSON output streamed from a Workflow execution.
+     """
+
      id: typing.Optional[str]
      name: str
      state: WorkflowNodeResultEventState
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class WorkflowResultEventOutputDataNumber(pydantic.BaseModel):
+     """
+     A number output streamed from a Workflow execution.
+     """
+
      id: typing.Optional[str]
      name: str
      state: WorkflowNodeResultEventState
@@ -14,6 +14,10 @@ except ImportError:
  
  
  class WorkflowResultEventOutputDataSearchResults(pydantic.BaseModel):
+     """
+     A Search Results output streamed from a Workflow execution.
+     """
+
      id: typing.Optional[str]
      name: str
      state: WorkflowNodeResultEventState
@@ -13,6 +13,10 @@ except ImportError:
  
  
  class WorkflowResultEventOutputDataString(pydantic.BaseModel):
+     """
+     A string output streamed from a Workflow execution.
+     """
+
      id: typing.Optional[str]
      name: str
      state: WorkflowNodeResultEventState
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.2.2
+ Version: 0.3.2
  Summary:
  Requires-Python: >=3.7,<4.0
  Classifier: Programming Language :: Python :: 3