vellum-ai 0.0.1__py3-none-any.whl
- vellum/__init__.py +98 -0
- vellum/client.py +203 -0
- vellum/core/__init__.py +8 -0
- vellum/core/api_error.py +15 -0
- vellum/core/datetime_utils.py +28 -0
- vellum/core/jsonable_encoder.py +94 -0
- vellum/core/remove_none_from_headers.py +11 -0
- vellum/environment.py +17 -0
- vellum/py.typed +0 -0
- vellum/resources/__init__.py +5 -0
- vellum/resources/documents/__init__.py +2 -0
- vellum/resources/documents/client.py +135 -0
- vellum/resources/model_versions/__init__.py +2 -0
- vellum/resources/model_versions/client.py +53 -0
- vellum/types/__init__.py +91 -0
- vellum/types/block_type_enum.py +29 -0
- vellum/types/chat_role_enum.py +25 -0
- vellum/types/document.py +30 -0
- vellum/types/document_document_to_document_index.py +31 -0
- vellum/types/enriched_normalized_completion.py +39 -0
- vellum/types/finish_reason_enum.py +25 -0
- vellum/types/generate_error_response.py +24 -0
- vellum/types/generate_options_request.py +27 -0
- vellum/types/generate_request_request.py +31 -0
- vellum/types/generate_response.py +39 -0
- vellum/types/generate_result.py +35 -0
- vellum/types/generate_result_data.py +27 -0
- vellum/types/generate_result_error.py +24 -0
- vellum/types/indexing_state_enum.py +33 -0
- vellum/types/logprobs_enum.py +17 -0
- vellum/types/model_type_enum.py +17 -0
- vellum/types/model_version_build_config.py +34 -0
- vellum/types/model_version_exec_config_parameters.py +31 -0
- vellum/types/model_version_exec_config_read.py +35 -0
- vellum/types/model_version_read.py +43 -0
- vellum/types/model_version_read_status_enum.py +29 -0
- vellum/types/model_version_sandbox_snapshot.py +25 -0
- vellum/types/normalized_log_probs.py +26 -0
- vellum/types/normalized_token_log_probs.py +27 -0
- vellum/types/paginated_slim_document_list.py +28 -0
- vellum/types/processing_state_enum.py +29 -0
- vellum/types/prompt_template_block.py +27 -0
- vellum/types/prompt_template_block_data.py +26 -0
- vellum/types/prompt_template_block_properties.py +28 -0
- vellum/types/provider_enum.py +37 -0
- vellum/types/search_error_response.py +24 -0
- vellum/types/search_filters_request.py +26 -0
- vellum/types/search_request_options_request.py +36 -0
- vellum/types/search_response.py +27 -0
- vellum/types/search_result.py +30 -0
- vellum/types/search_result_merging_request.py +24 -0
- vellum/types/search_weights_request.py +25 -0
- vellum/types/slim_document.py +44 -0
- vellum/types/slim_document_status_enum.py +14 -0
- vellum/types/submit_completion_actual_request.py +46 -0
- vellum/types/submit_completion_actuals_error_response.py +24 -0
- vellum/types/upload_document_error_response.py +24 -0
- vellum/types/upload_document_response.py +24 -0
- vellum_ai-0.0.1.dist-info/METADATA +15 -0
- vellum_ai-0.0.1.dist-info/RECORD +61 -0
- vellum_ai-0.0.1.dist-info/WHEEL +4 -0
vellum/types/model_version_exec_config_parameters.py ADDED
@@ -0,0 +1,31 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class ModelVersionExecConfigParameters(pydantic.BaseModel):
+    temperature: float
+    max_tokens: int
+    stop: typing.List[str]
+    top_p: float
+    top_k: typing.Optional[float]
+    frequency_penalty: float
+    presence_penalty: float
+    logit_bias: typing.Optional[typing.Dict[str, typing.Optional[float]]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
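A brief usage sketch, not part of the diff: it assumes the wheel is installed as `vellum` and uses illustrative parameter values. It shows how these generated Pydantic models are typically handled: construct the frozen model, then serialize with the overridden .dict()/.json(), which default to by_alias=True and exclude_unset=True.

from vellum.types.model_version_exec_config_parameters import ModelVersionExecConfigParameters

# Required fields only; optional fields (top_k, logit_bias) are left unset.
params = ModelVersionExecConfigParameters(
    temperature=0.7,
    max_tokens=256,
    stop=["\n\n"],
    top_p=1.0,
    frequency_penalty=0.0,
    presence_penalty=0.0,
)
print(params.json())  # unset optional fields are dropped because of exclude_unset=True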
vellum/types/model_version_exec_config_read.py ADDED
@@ -0,0 +1,35 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .model_version_exec_config_parameters import ModelVersionExecConfigParameters
+from .prompt_template_block_data import PromptTemplateBlockData
+
+
+class ModelVersionExecConfigRead(pydantic.BaseModel):
+    parameters: ModelVersionExecConfigParameters = pydantic.Field(
+        description=("The generation parameters that are passed to the LLM provider at runtime.\n")
+    )
+    input_variables: typing.List[str] = pydantic.Field(
+        description=("Names of the template variables specified in the prompt template.\n")
+    )
+    prompt_template: str = pydantic.Field(
+        description=("The template used to generate prompts for this model version.\n")
+    )
+    prompt_block_data: typing.Optional[PromptTemplateBlockData]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/model_version_read.py ADDED
@@ -0,0 +1,43 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .model_type_enum import ModelTypeEnum
+from .model_version_build_config import ModelVersionBuildConfig
+from .model_version_exec_config_read import ModelVersionExecConfigRead
+from .model_version_read_status_enum import ModelVersionReadStatusEnum
+from .provider_enum import ProviderEnum
+
+
+class ModelVersionRead(pydantic.BaseModel):
+    id: str = pydantic.Field(description=("Vellum-generated ID that uniquely identifies this model version.\n"))
+    created: str = pydantic.Field(description=("Timestamp of when this model version was created.\n"))
+    label: str = pydantic.Field(description=("Human-friendly name for this model version.\n"))
+    model_type: ModelTypeEnum = pydantic.Field(description=("The type of task this model is used for.\n"))
+    provider: ProviderEnum = pydantic.Field(description=("Which LLM provider this model version is associated with.\n"))
+    external_id: str = pydantic.Field(
+        description=("The unique id of this model version as it exists in the above provider's system.\n")
+    )
+    build_config: ModelVersionBuildConfig = pydantic.Field(
+        description=("Configuration used to build this model version.\n")
+    )
+    exec_config: ModelVersionExecConfigRead = pydantic.Field(
+        description=("Configuration used to execute this model version.\n")
+    )
+    status: typing.Optional[ModelVersionReadStatusEnum]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/model_version_read_status_enum.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ModelVersionReadStatusEnum(str, enum.Enum):
+    CREATING = "CREATING"
+    READY = "READY"
+    CREATION_FAILED = "CREATION_FAILED"
+    DISABLED = "DISABLED"
+
+    def visit(
+        self,
+        creating: typing.Callable[[], T_Result],
+        ready: typing.Callable[[], T_Result],
+        creation_failed: typing.Callable[[], T_Result],
+        disabled: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ModelVersionReadStatusEnum.CREATING:
+            return creating()
+        if self is ModelVersionReadStatusEnum.READY:
+            return ready()
+        if self is ModelVersionReadStatusEnum.CREATION_FAILED:
+            return creation_failed()
+        if self is ModelVersionReadStatusEnum.DISABLED:
+            return disabled()
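A hedged sketch (not part of the diff; assumes the wheel is installed as `vellum`) of the visitor-style helper that the generated enums carry: .visit() dispatches to the callable matching the current member, so callers get exhaustive handling without an if/else chain.

from vellum.types.model_version_read_status_enum import ModelVersionReadStatusEnum

status = ModelVersionReadStatusEnum.READY
# One callable per enum member; the one matching `status` is invoked.
label = status.visit(
    creating=lambda: "still building",
    ready=lambda: "ready to serve",
    creation_failed=lambda: "creation failed",
    disabled=lambda: "disabled",
)
print(label)  # -> "ready to serve"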
vellum/types/model_version_sandbox_snapshot.py ADDED
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class ModelVersionSandboxSnapshot(pydantic.BaseModel):
+    id: str = pydantic.Field(description=("The ID of the sandbox snapshot.\n"))
+    prompt_index: int = pydantic.Field(description=("The index of the prompt in the sandbox snapshot.\n"))
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/normalized_log_probs.py ADDED
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .normalized_token_log_probs import NormalizedTokenLogProbs
+
+
+class NormalizedLogProbs(pydantic.BaseModel):
+    tokens: typing.List[NormalizedTokenLogProbs]
+    likelihood: float
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/normalized_token_log_probs.py ADDED
@@ -0,0 +1,27 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class NormalizedTokenLogProbs(pydantic.BaseModel):
+    token: str
+    logprob: float
+    top_logprobs: typing.Dict[str, float]
+    text_offset: int
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/paginated_slim_document_list.py ADDED
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .slim_document import SlimDocument
+
+
+class PaginatedSlimDocumentList(pydantic.BaseModel):
+    count: typing.Optional[int]
+    next: typing.Optional[str]
+    previous: typing.Optional[str]
+    results: typing.Optional[typing.List[SlimDocument]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
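A minimal consumer-side sketch (not part of the diff; the helper name is hypothetical and the page is assumed to come from the documents list endpoint) showing why the optional fields matter: every attribute on the page model can be None, so guard before iterating.

from typing import List

from vellum.types.paginated_slim_document_list import PaginatedSlimDocumentList

def document_labels(page: PaginatedSlimDocumentList) -> List[str]:
    # `results` is Optional, so fall back to an empty list before iterating.
    return [doc.label for doc in (page.results or [])]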
vellum/types/processing_state_enum.py ADDED
@@ -0,0 +1,29 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ProcessingStateEnum(str, enum.Enum):
+    QUEUED = "QUEUED"
+    PROCESSING = "PROCESSING"
+    PROCESSED = "PROCESSED"
+    FAILED = "FAILED"
+
+    def visit(
+        self,
+        queued: typing.Callable[[], T_Result],
+        processing: typing.Callable[[], T_Result],
+        processed: typing.Callable[[], T_Result],
+        failed: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ProcessingStateEnum.QUEUED:
+            return queued()
+        if self is ProcessingStateEnum.PROCESSING:
+            return processing()
+        if self is ProcessingStateEnum.PROCESSED:
+            return processed()
+        if self is ProcessingStateEnum.FAILED:
+            return failed()
vellum/types/prompt_template_block.py ADDED
@@ -0,0 +1,27 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .block_type_enum import BlockTypeEnum
+from .prompt_template_block_properties import PromptTemplateBlockProperties
+
+
+class PromptTemplateBlock(pydantic.BaseModel):
+    block_type: BlockTypeEnum
+    properties: PromptTemplateBlockProperties
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/prompt_template_block_data.py ADDED
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .prompt_template_block import PromptTemplateBlock
+
+
+class PromptTemplateBlockData(pydantic.BaseModel):
+    version: int
+    blocks: typing.List[PromptTemplateBlock]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/prompt_template_block_properties.py ADDED
@@ -0,0 +1,28 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .chat_role_enum import ChatRoleEnum
+
+
+class PromptTemplateBlockProperties(pydantic.BaseModel):
+    chat_role: typing.Optional[ChatRoleEnum]
+    text: typing.Optional[str]
+    variable_name: typing.Optional[str]
+    blocks: typing.Optional[typing.List[typing.Dict[str, typing.Any]]]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/provider_enum.py ADDED
@@ -0,0 +1,37 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class ProviderEnum(str, enum.Enum):
+    ANTHROPIC = "ANTHROPIC"
+    COHERE = "COHERE"
+    GOOGLE = "GOOGLE"
+    HOSTED = "HOSTED"
+    OPENAI = "OPENAI"
+    PYQ = "PYQ"
+
+    def visit(
+        self,
+        anthropic: typing.Callable[[], T_Result],
+        cohere: typing.Callable[[], T_Result],
+        google: typing.Callable[[], T_Result],
+        hosted: typing.Callable[[], T_Result],
+        openai: typing.Callable[[], T_Result],
+        pyq: typing.Callable[[], T_Result],
+    ) -> T_Result:
+        if self is ProviderEnum.ANTHROPIC:
+            return anthropic()
+        if self is ProviderEnum.COHERE:
+            return cohere()
+        if self is ProviderEnum.GOOGLE:
+            return google()
+        if self is ProviderEnum.HOSTED:
+            return hosted()
+        if self is ProviderEnum.OPENAI:
+            return openai()
+        if self is ProviderEnum.PYQ:
+            return pyq()
vellum/types/search_error_response.py ADDED
@@ -0,0 +1,24 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class SearchErrorResponse(pydantic.BaseModel):
+    detail: str = pydantic.Field(description=("Details about why the request failed.\n"))
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/search_filters_request.py ADDED
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class SearchFiltersRequest(pydantic.BaseModel):
+    external_ids: typing.Optional[typing.List[str]] = pydantic.Field(
+        description=("The document external IDs to filter by\n")
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/search_request_options_request.py ADDED
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .search_filters_request import SearchFiltersRequest
+from .search_result_merging_request import SearchResultMergingRequest
+from .search_weights_request import SearchWeightsRequest
+
+
+class SearchRequestOptionsRequest(pydantic.BaseModel):
+    limit: typing.Optional[int] = pydantic.Field(description=("The maximum number of results to return.\n"))
+    weights: typing.Optional[SearchWeightsRequest] = pydantic.Field(
+        description=("The weights to use for the search. Must add up to 1.0.\n")
+    )
+    result_merging: typing.Optional[SearchResultMergingRequest] = pydantic.Field(
+        description=("The configuration for merging results.\n")
+    )
+    filters: typing.Optional[SearchFiltersRequest] = pydantic.Field(
+        description=("The filters to apply to the search.\n")
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
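A hedged sketch (not part of the diff; assumes the wheel is installed as `vellum`, and the values shown are illustrative) of how the nested search request models compose: weights that must sum to 1.0, optional result merging, and document filters, all wrapped in SearchRequestOptionsRequest.

from vellum.types.search_filters_request import SearchFiltersRequest
from vellum.types.search_request_options_request import SearchRequestOptionsRequest
from vellum.types.search_result_merging_request import SearchResultMergingRequest
from vellum.types.search_weights_request import SearchWeightsRequest

options = SearchRequestOptionsRequest(
    limit=5,
    weights=SearchWeightsRequest(semantic_similarity=0.8, keywords=0.2),
    result_merging=SearchResultMergingRequest(enabled=True),
    filters=SearchFiltersRequest(external_ids=["doc-1", "doc-2"]),
)
print(options.dict())  # nested models serialize recursively; unset fields are dropped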
vellum/types/search_response.py ADDED
@@ -0,0 +1,27 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .search_result import SearchResult
+
+
+class SearchResponse(pydantic.BaseModel):
+    results: typing.List[SearchResult] = pydantic.Field(
+        description=("The results of the search. Each result represents a chunk that matches the search query.\n")
+    )
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/search_result.py ADDED
@@ -0,0 +1,30 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .document import Document
+
+
+class SearchResult(pydantic.BaseModel):
+    document: Document = pydantic.Field(
+        description=("The document that contains the chunk that matched the search query.\n")
+    )
+    text: str = pydantic.Field(description=("The text of the chunk that matched the search query.\n"))
+    keywords: typing.List[str]
+    score: float = pydantic.Field(description=("A score representing how well the chunk matches the search query.\n"))
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/search_result_merging_request.py ADDED
@@ -0,0 +1,24 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class SearchResultMergingRequest(pydantic.BaseModel):
+    enabled: bool = pydantic.Field(description=("Whether to enable merging results\n"))
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/search_weights_request.py ADDED
@@ -0,0 +1,25 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+
+
+class SearchWeightsRequest(pydantic.BaseModel):
+    semantic_similarity: float = pydantic.Field(description=("The relative weight to give to semantic similarity\n"))
+    keywords: float = pydantic.Field(description=("The relative weight to give to keyword matches\n"))
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/slim_document.py ADDED
@@ -0,0 +1,44 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+import pydantic
+
+from ..core.datetime_utils import serialize_datetime
+from .document_document_to_document_index import DocumentDocumentToDocumentIndex
+from .processing_state_enum import ProcessingStateEnum
+from .slim_document_status_enum import SlimDocumentStatusEnum
+
+
+class SlimDocument(pydantic.BaseModel):
+    id: str = pydantic.Field(description=("Vellum-generated ID that uniquely identifies this document.\n"))
+    external_id: typing.Optional[str] = pydantic.Field(
+        description=("The external ID that was originally provided when uploading the document.\n")
+    )
+    last_uploaded_at: str = pydantic.Field(
+        description=("A timestamp representing when this document was most recently uploaded.\n")
+    )
+    label: str = pydantic.Field(description=("Human-friendly name for this document.\n"))
+    processing_state: typing.Optional[ProcessingStateEnum] = pydantic.Field(
+        description=("The current processing state of the document\n")
+    )
+    status: typing.Optional[SlimDocumentStatusEnum] = pydantic.Field(description=("The document's current status.\n"))
+    keywords: typing.Optional[typing.List[str]] = pydantic.Field(
+        description=(
+            "A list of keywords associated with this document. Originally provided when uploading the document.\n"
+        )
+    )
+    document_to_document_indexes: typing.List[DocumentDocumentToDocumentIndex]
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().dict(**kwargs_with_defaults)
+
+    class Config:
+        frozen = True
+        json_encoders = {dt.datetime: serialize_datetime}
vellum/types/slim_document_status_enum.py ADDED
@@ -0,0 +1,14 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import enum
+import typing
+
+T_Result = typing.TypeVar("T_Result")
+
+
+class SlimDocumentStatusEnum(str, enum.Enum):
+    ACTIVE = "ACTIVE"
+
+    def visit(self, active: typing.Callable[[], T_Result]) -> T_Result:
+        if self is SlimDocumentStatusEnum.ACTIVE:
+            return active()