vellum-ai 0.5.1__py3-none-any.whl → 0.6.0__py3-none-any.whl
- vellum/__init__.py +170 -20
- vellum/core/client_wrapper.py +1 -1
- vellum/resources/document_indexes/client.py +61 -52
- vellum/resources/documents/client.py +8 -4
- vellum/types/__init__.py +189 -19
- vellum/types/add_openai_api_key_enum.py +3 -0
- vellum/types/array_variable_value_item.py +0 -24
- vellum/types/array_vellum_value_item.py +82 -0
- vellum/types/basic_vectorizer_intfloat_multilingual_e_5_large.py +29 -0
- vellum/types/basic_vectorizer_intfloat_multilingual_e_5_large_request.py +29 -0
- vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_cos_v_1.py +29 -0
- vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_cos_v_1_request.py +29 -0
- vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_dot_v_1.py +29 -0
- vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_dot_v_1_request.py +29 -0
- vellum/types/document_index_chunking.py +46 -0
- vellum/types/document_index_chunking_request.py +46 -0
- vellum/types/document_index_indexing_config.py +28 -0
- vellum/types/document_index_indexing_config_request.py +28 -0
- vellum/types/document_index_read.py +2 -4
- vellum/types/{rejected_function_call.py → error_vellum_value.py} +3 -5
- vellum/types/function_call.py +20 -18
- vellum/types/{fulfilled_function_call_request.py → function_call_request.py} +2 -2
- vellum/types/function_call_variable_value.py +1 -1
- vellum/types/{node_output_compiled_function_value.py → function_call_vellum_value.py} +5 -2
- vellum/types/hkunlp_instructor_xl_enum.py +5 -0
- vellum/types/hkunlp_instructor_xl_vectorizer.py +30 -0
- vellum/types/hkunlp_instructor_xl_vectorizer_request.py +30 -0
- vellum/types/{fulfilled_function_call.py → image_vellum_value.py} +4 -5
- vellum/types/indexing_config_vectorizer.py +106 -0
- vellum/types/indexing_config_vectorizer_request.py +106 -0
- vellum/types/instructor_vectorizer_config.py +31 -0
- vellum/types/instructor_vectorizer_config_request.py +31 -0
- vellum/types/intfloat_multilingual_e_5_large_enum.py +5 -0
- vellum/types/json_vellum_value.py +29 -0
- vellum/types/metric_enum.py +5 -0
- vellum/types/{chat_history_variable_value.py → metric_node_result.py} +4 -3
- vellum/types/named_test_case_function_call_variable_value.py +2 -2
- vellum/types/named_test_case_function_call_variable_value_request.py +2 -2
- vellum/types/node_output_compiled_array_value.py +8 -2
- vellum/types/node_output_compiled_chat_history_value.py +7 -1
- vellum/types/node_output_compiled_error_value.py +7 -1
- vellum/types/node_output_compiled_function_call_value.py +33 -0
- vellum/types/node_output_compiled_json_value.py +7 -1
- vellum/types/node_output_compiled_number_value.py +7 -1
- vellum/types/node_output_compiled_search_results_value.py +7 -1
- vellum/types/node_output_compiled_string_value.py +7 -1
- vellum/types/node_output_compiled_value.py +2 -2
- vellum/types/{search_results_variable_value.py → number_vellum_value.py} +6 -3
- vellum/types/open_ai_vectorizer_config.py +30 -0
- vellum/types/open_ai_vectorizer_config_request.py +30 -0
- vellum/types/open_ai_vectorizer_text_embedding_3_large.py +30 -0
- vellum/types/open_ai_vectorizer_text_embedding_3_large_request.py +30 -0
- vellum/types/open_ai_vectorizer_text_embedding_3_small.py +30 -0
- vellum/types/open_ai_vectorizer_text_embedding_3_small_request.py +30 -0
- vellum/types/open_ai_vectorizer_text_embedding_ada_002.py +30 -0
- vellum/types/open_ai_vectorizer_text_embedding_ada_002_request.py +30 -0
- vellum/types/prompt_output.py +8 -8
- vellum/types/reducto_chunker_config.py +29 -0
- vellum/types/reducto_chunker_config_request.py +29 -0
- vellum/types/reducto_chunker_enum.py +5 -0
- vellum/types/reducto_chunking.py +30 -0
- vellum/types/reducto_chunking_request.py +30 -0
- vellum/types/sentence_chunker_config.py +30 -0
- vellum/types/sentence_chunker_config_request.py +30 -0
- vellum/types/sentence_chunker_enum.py +5 -0
- vellum/types/sentence_chunking.py +30 -0
- vellum/types/sentence_chunking_request.py +30 -0
- vellum/types/sentence_transformers_multi_qa_mpnet_base_cos_v_1_enum.py +5 -0
- vellum/types/sentence_transformers_multi_qa_mpnet_base_dot_v_1_enum.py +5 -0
- vellum/types/string_vellum_value.py +29 -0
- vellum/types/test_case_function_call_variable_value.py +2 -2
- vellum/types/test_suite_run_execution_function_call_output.py +2 -2
- vellum/types/text_embedding_3_large_enum.py +5 -0
- vellum/types/text_embedding_3_small_enum.py +5 -0
- vellum/types/text_embedding_ada_002_enum.py +5 -0
- vellum/types/token_overlapping_window_chunker_config.py +30 -0
- vellum/types/token_overlapping_window_chunker_config_request.py +30 -0
- vellum/types/token_overlapping_window_chunker_enum.py +5 -0
- vellum/types/token_overlapping_window_chunking.py +30 -0
- vellum/types/token_overlapping_window_chunking_request.py +30 -0
- vellum/types/workflow_execution_actual_chat_history_request.py +5 -0
- vellum/types/workflow_execution_actual_json_request.py +5 -0
- vellum/types/workflow_execution_actual_string_request.py +5 -0
- vellum/types/workflow_node_result_data.py +12 -0
- vellum/types/workflow_output_array.py +2 -2
- {vellum_ai-0.5.1.dist-info → vellum_ai-0.6.0.dist-info}/METADATA +1 -1
- {vellum_ai-0.5.1.dist-info → vellum_ai-0.6.0.dist-info}/RECORD +89 -37
- {vellum_ai-0.5.1.dist-info → vellum_ai-0.6.0.dist-info}/LICENSE +0 -0
- {vellum_ai-0.5.1.dist-info → vellum_ai-0.6.0.dist-info}/WHEEL +0 -0
vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_cos_v_1_request.py
ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class BasicVectorizerSentenceTransformersMultiQaMpnetBaseCosV1Request(pydantic_v1.BaseModel):
+    """
+    Basic vectorizer for sentence-transformers/multi-qa-mpnet-base-cos-v1.
+    """
+
+    config: typing.Optional[typing.Dict[str, typing.Any]] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_dot_v_1.py
ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class BasicVectorizerSentenceTransformersMultiQaMpnetBaseDotV1(pydantic_v1.BaseModel):
+    """
+    Basic vectorizer for sentence-transformers/multi-qa-mpnet-base-dot-v1.
+    """
+
+    config: typing.Optional[typing.Dict[str, typing.Any]] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_dot_v_1_request.py
ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+
+
+class BasicVectorizerSentenceTransformersMultiQaMpnetBaseDotV1Request(pydantic_v1.BaseModel):
+    """
+    Basic vectorizer for sentence-transformers/multi-qa-mpnet-base-dot-v1.
+    """
+
+    config: typing.Optional[typing.Dict[str, typing.Any]] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/document_index_chunking.py
ADDED
@@ -0,0 +1,46 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+from .reducto_chunking import ReductoChunking
+from .sentence_chunking import SentenceChunking
+from .token_overlapping_window_chunking import TokenOverlappingWindowChunking
+
+
+class DocumentIndexChunking_ReductoChunker(ReductoChunking):
+    chunker_name: typing.Literal["reducto-chunker"] = "reducto-chunker"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class DocumentIndexChunking_SentenceChunker(SentenceChunking):
+    chunker_name: typing.Literal["sentence-chunker"] = "sentence-chunker"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class DocumentIndexChunking_TokenOverlappingWindowChunker(TokenOverlappingWindowChunking):
+    chunker_name: typing.Literal["token-overlapping-window-chunker"] = "token-overlapping-window-chunker"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+DocumentIndexChunking = typing.Union[
+    DocumentIndexChunking_ReductoChunker,
+    DocumentIndexChunking_SentenceChunker,
+    DocumentIndexChunking_TokenOverlappingWindowChunker,
+]
vellum/types/document_index_chunking_request.py
ADDED
@@ -0,0 +1,46 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+from .reducto_chunking_request import ReductoChunkingRequest
+from .sentence_chunking_request import SentenceChunkingRequest
+from .token_overlapping_window_chunking_request import TokenOverlappingWindowChunkingRequest
+
+
+class DocumentIndexChunkingRequest_ReductoChunker(ReductoChunkingRequest):
+    chunker_name: typing.Literal["reducto-chunker"] = "reducto-chunker"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class DocumentIndexChunkingRequest_SentenceChunker(SentenceChunkingRequest):
+    chunker_name: typing.Literal["sentence-chunker"] = "sentence-chunker"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class DocumentIndexChunkingRequest_TokenOverlappingWindowChunker(TokenOverlappingWindowChunkingRequest):
+    chunker_name: typing.Literal["token-overlapping-window-chunker"] = "token-overlapping-window-chunker"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+DocumentIndexChunkingRequest = typing.Union[
+    DocumentIndexChunkingRequest_ReductoChunker,
+    DocumentIndexChunkingRequest_SentenceChunker,
+    DocumentIndexChunkingRequest_TokenOverlappingWindowChunker,
+]
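The two chunking modules above define parallel discriminated unions: each variant subclasses its chunking model and pins a `chunker_name` literal that pydantic's smart_union uses to select the right type. A minimal sketch of inspecting those discriminators follows; it assumes the union is re-exported from `vellum.types`, which is how the updated `vellum/types/__init__.py` appears to expose these types.

import typing

from vellum.types import DocumentIndexChunkingRequest  # assumed re-export via vellum/types/__init__.py

# Print the chunker_name discriminator that identifies each variant of the union.
for variant in typing.get_args(DocumentIndexChunkingRequest):
    print(variant.__name__, "->", variant.__fields__["chunker_name"].default)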
vellum/types/document_index_indexing_config.py
ADDED
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .document_index_chunking import DocumentIndexChunking
+from .indexing_config_vectorizer import IndexingConfigVectorizer
+
+
+class DocumentIndexIndexingConfig(pydantic_v1.BaseModel):
+    vectorizer: IndexingConfigVectorizer
+    chunking: typing.Optional[DocumentIndexChunking] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/document_index_indexing_config_request.py
ADDED
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .document_index_chunking_request import DocumentIndexChunkingRequest
+from .indexing_config_vectorizer_request import IndexingConfigVectorizerRequest
+
+
+class DocumentIndexIndexingConfigRequest(pydantic_v1.BaseModel):
+    vectorizer: IndexingConfigVectorizerRequest
+    chunking: typing.Optional[DocumentIndexChunkingRequest] = None
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/document_index_read.py
CHANGED
@@ -5,6 +5,7 @@ import typing
 
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from .document_index_indexing_config import DocumentIndexIndexingConfig
 from .entity_status import EntityStatus
 from .environment_enum import EnvironmentEnum
 
@@ -39,10 +40,7 @@ class DocumentIndexRead(pydantic_v1.BaseModel):
     - `PRODUCTION` - Production
     """
 
-    indexing_config:
-    """
-    Configuration representing how documents should be indexed
-    """
+    indexing_config: DocumentIndexIndexingConfig
 
    def json(self, **kwargs: typing.Any) -> str:
        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
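With the change above, `DocumentIndexRead.indexing_config` becomes a typed `DocumentIndexIndexingConfig` rather than a plain dict, so the vectorizer and optional chunking are regular pydantic attributes. A rough sketch of reading it, with the import path assumed to follow the same `vellum.types` re-export pattern as the rest of the package:

from vellum.types import DocumentIndexRead  # assumed re-export

def describe_index(index: DocumentIndexRead) -> str:
    # In 0.6.0 the indexing config is structured: every vectorizer variant carries a
    # model_name literal and every chunking variant carries a chunker_name literal.
    config = index.indexing_config
    chunker = config.chunking.chunker_name if config.chunking is not None else "default"
    return f"{config.vectorizer.model_name} / {chunker}"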
vellum/types/{rejected_function_call.py → error_vellum_value.py}
RENAMED
@@ -8,14 +8,12 @@ from ..core.pydantic_utilities import pydantic_v1
 from .vellum_error import VellumError
 
 
-class
+class ErrorVellumValue(pydantic_v1.BaseModel):
     """
-
+    A value representing an Error.
     """
 
-
-    id: typing.Optional[str] = None
-    name: str
+    value: typing.Optional[VellumError] = None
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
vellum/types/function_call.py
CHANGED
@@ -1,31 +1,33 @@
 # This file was auto-generated by Fern from our API Definition.
 
-
-
+import datetime as dt
 import typing
 
-from .
-from .
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .fulfilled_enum import FulfilledEnum
 
 
-class
-
+class FunctionCall(pydantic_v1.BaseModel):
+    """
+    The final resolved function call value.
+    """
 
-
-
-
-
-        populate_by_name = True
+    arguments: typing.Dict[str, typing.Any]
+    id: typing.Optional[str] = None
+    name: str
+    state: typing.Optional[FulfilledEnum] = None
 
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
 
-
-
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
 
     class Config:
         frozen = True
         smart_union = True
-
-
-
-
-FunctionCall = typing.Union[FunctionCall_Fulfilled, FunctionCall_Rejected]
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
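The diff above collapses the old fulfilled/rejected union into a single `FunctionCall` model whose fields are `arguments`, `id`, `name`, and the now-optional `state`. A small sketch of the new shape, assuming the class remains importable from `vellum.types`:

from vellum.types import FunctionCall  # assumed re-export

# Only arguments and name are required; id and state are optional in 0.6.0.
call = FunctionCall(
    name="get_weather",            # illustrative function name
    arguments={"city": "Berlin"},  # illustrative arguments payload
)

# json()/dict() apply by_alias=True and exclude_unset=True by default, per the generated overrides.
print(call.json())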
vellum/types/{fulfilled_function_call_request.py → function_call_request.py}
RENAMED
@@ -8,15 +8,15 @@ from ..core.pydantic_utilities import pydantic_v1
 from .fulfilled_enum import FulfilledEnum
 
 
-class
+class FunctionCallRequest(pydantic_v1.BaseModel):
     """
     The final resolved function call value.
     """
 
-    state: FulfilledEnum
     arguments: typing.Dict[str, typing.Any]
     id: typing.Optional[str] = None
     name: str
+    state: typing.Optional[FulfilledEnum] = None
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
vellum/types/function_call_variable_value.py
CHANGED
@@ -9,7 +9,7 @@ from .function_call import FunctionCall
 
 
 class FunctionCallVariableValue(pydantic_v1.BaseModel):
-    value: FunctionCall
+    value: typing.Optional[FunctionCall] = None
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
vellum/types/{node_output_compiled_function_value.py → function_call_vellum_value.py}
RENAMED
@@ -8,8 +8,11 @@ from ..core.pydantic_utilities import pydantic_v1
 from .function_call import FunctionCall
 
 
-class
-
+class FunctionCallVellumValue(pydantic_v1.BaseModel):
+    """
+    A value representing a Function Call.
+    """
+
     value: typing.Optional[FunctionCall] = None
 
     def json(self, **kwargs: typing.Any) -> str:
vellum/types/hkunlp_instructor_xl_vectorizer.py
ADDED
@@ -0,0 +1,30 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .instructor_vectorizer_config import InstructorVectorizerConfig
+
+
+class HkunlpInstructorXlVectorizer(pydantic_v1.BaseModel):
+    """
+    Vectorizer for hkunlp/instructor-xl.
+    """
+
+    config: InstructorVectorizerConfig
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/hkunlp_instructor_xl_vectorizer_request.py
ADDED
@@ -0,0 +1,30 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import pydantic_v1
+from .instructor_vectorizer_config_request import InstructorVectorizerConfigRequest
+
+
+class HkunlpInstructorXlVectorizerRequest(pydantic_v1.BaseModel):
+    """
+    Vectorizer for hkunlp/instructor-xl.
+    """
+
+    config: InstructorVectorizerConfigRequest
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/{fulfilled_function_call.py → image_vellum_value.py}
RENAMED
@@ -5,16 +5,15 @@ import typing
 
 from ..core.datetime_utils import serialize_datetime
 from ..core.pydantic_utilities import pydantic_v1
+from .vellum_image import VellumImage
 
 
-class
+class ImageVellumValue(pydantic_v1.BaseModel):
     """
-
+    A base Vellum primitive value representing an image.
     """
 
-
-    id: typing.Optional[str] = None
-    name: str
+    value: typing.Optional[VellumImage] = None
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
vellum/types/indexing_config_vectorizer.py
ADDED
@@ -0,0 +1,106 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from __future__ import annotations
+
+import typing
+
+from .basic_vectorizer_intfloat_multilingual_e_5_large import BasicVectorizerIntfloatMultilingualE5Large
+from .basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_cos_v_1 import (
+    BasicVectorizerSentenceTransformersMultiQaMpnetBaseCosV1,
+)
+from .basic_vectorizer_sentence_transformers_multi_qa_mpnet_base_dot_v_1 import (
+    BasicVectorizerSentenceTransformersMultiQaMpnetBaseDotV1,
+)
+from .hkunlp_instructor_xl_vectorizer import HkunlpInstructorXlVectorizer
+from .open_ai_vectorizer_text_embedding_3_large import OpenAiVectorizerTextEmbedding3Large
+from .open_ai_vectorizer_text_embedding_3_small import OpenAiVectorizerTextEmbedding3Small
+from .open_ai_vectorizer_text_embedding_ada_002 import OpenAiVectorizerTextEmbeddingAda002
+
+
+class IndexingConfigVectorizer_TextEmbedding3Small(OpenAiVectorizerTextEmbedding3Small):
+    model_name: typing.Literal["text-embedding-3-small"] = "text-embedding-3-small"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class IndexingConfigVectorizer_TextEmbedding3Large(OpenAiVectorizerTextEmbedding3Large):
+    model_name: typing.Literal["text-embedding-3-large"] = "text-embedding-3-large"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class IndexingConfigVectorizer_TextEmbeddingAda002(OpenAiVectorizerTextEmbeddingAda002):
+    model_name: typing.Literal["text-embedding-ada-002"] = "text-embedding-ada-002"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class IndexingConfigVectorizer_IntfloatMultilingualE5Large(BasicVectorizerIntfloatMultilingualE5Large):
+    model_name: typing.Literal["intfloat/multilingual-e5-large"] = "intfloat/multilingual-e5-large"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class IndexingConfigVectorizer_SentenceTransformersMultiQaMpnetBaseCosV1(
+    BasicVectorizerSentenceTransformersMultiQaMpnetBaseCosV1
+):
+    model_name: typing.Literal[
+        "sentence-transformers/multi-qa-mpnet-base-cos-v1"
+    ] = "sentence-transformers/multi-qa-mpnet-base-cos-v1"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class IndexingConfigVectorizer_SentenceTransformersMultiQaMpnetBaseDotV1(
+    BasicVectorizerSentenceTransformersMultiQaMpnetBaseDotV1
+):
+    model_name: typing.Literal[
+        "sentence-transformers/multi-qa-mpnet-base-dot-v1"
+    ] = "sentence-transformers/multi-qa-mpnet-base-dot-v1"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+class IndexingConfigVectorizer_HkunlpInstructorXl(HkunlpInstructorXlVectorizer):
+    model_name: typing.Literal["hkunlp/instructor-xl"] = "hkunlp/instructor-xl"
+
+    class Config:
+        frozen = True
+        smart_union = True
+        allow_population_by_field_name = True
+        populate_by_name = True
+
+
+IndexingConfigVectorizer = typing.Union[
+    IndexingConfigVectorizer_TextEmbedding3Small,
+    IndexingConfigVectorizer_TextEmbedding3Large,
+    IndexingConfigVectorizer_TextEmbeddingAda002,
+    IndexingConfigVectorizer_IntfloatMultilingualE5Large,
+    IndexingConfigVectorizer_SentenceTransformersMultiQaMpnetBaseCosV1,
+    IndexingConfigVectorizer_SentenceTransformersMultiQaMpnetBaseDotV1,
+    IndexingConfigVectorizer_HkunlpInstructorXl,
+]
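Putting the new types together: a document index's indexing config is one vectorizer variant (discriminated by `model_name`) plus an optional chunking variant. A hypothetical end-to-end sketch using only types whose fields appear in this diff; the imports are assumed to resolve through `vellum.types`, and real requests would presumably use the parallel `*Request` models with the document_indexes client.

from vellum.types import (  # assumed re-exports
    DocumentIndexIndexingConfig,
    IndexingConfigVectorizer_SentenceTransformersMultiQaMpnetBaseCosV1,
)

# The basic sentence-transformers vectorizer's config field is optional, so no
# arguments are needed; the model_name literal defaults to the right discriminator.
vectorizer = IndexingConfigVectorizer_SentenceTransformersMultiQaMpnetBaseCosV1()

indexing_config = DocumentIndexIndexingConfig(
    vectorizer=vectorizer,
    chunking=None,  # optional: a DocumentIndexChunking_* variant could be supplied here
)

print(indexing_config.dict())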