vellum-ai 0.7.6__py3-none-any.whl → 0.7.7__py3-none-any.whl

vellum/__init__.py CHANGED
@@ -236,8 +236,10 @@ from .types import (
     MlModelTokenizerConfig_Tiktoken,
     MlModelUsage,
     NamedScenarioInputChatHistoryVariableValueRequest,
+    NamedScenarioInputJsonVariableValueRequest,
     NamedScenarioInputRequest,
     NamedScenarioInputRequest_ChatHistory,
+    NamedScenarioInputRequest_Json,
     NamedScenarioInputRequest_String,
     NamedScenarioInputStringVariableValueRequest,
     NamedTestCaseArrayVariableValue,
@@ -395,8 +397,10 @@ from .types import (
     SandboxScenario,
     ScenarioInput,
     ScenarioInputChatHistoryVariableValue,
+    ScenarioInputJsonVariableValue,
     ScenarioInputStringVariableValue,
     ScenarioInput_ChatHistory,
+    ScenarioInput_Json,
     ScenarioInput_String,
     SearchFiltersRequest,
     SearchNodeResult,
@@ -922,8 +926,10 @@ __all__ = [
     "MlModelTokenizerConfig_Tiktoken",
     "MlModelUsage",
     "NamedScenarioInputChatHistoryVariableValueRequest",
+    "NamedScenarioInputJsonVariableValueRequest",
     "NamedScenarioInputRequest",
     "NamedScenarioInputRequest_ChatHistory",
+    "NamedScenarioInputRequest_Json",
     "NamedScenarioInputRequest_String",
     "NamedScenarioInputStringVariableValueRequest",
     "NamedTestCaseArrayVariableValue",
@@ -1082,8 +1088,10 @@ __all__ = [
     "SandboxScenario",
     "ScenarioInput",
     "ScenarioInputChatHistoryVariableValue",
+    "ScenarioInputJsonVariableValue",
     "ScenarioInputStringVariableValue",
     "ScenarioInput_ChatHistory",
+    "ScenarioInput_Json",
     "ScenarioInput_String",
     "SearchFiltersRequest",
     "SearchNodeResult",
vellum/core/client_wrapper.py CHANGED
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.7.6",
+            "X-Fern-SDK-Version": "0.7.7",
         }
         headers["X_API_KEY"] = self.api_key
         return headers
vellum/resources/ml_models/client.py CHANGED
@@ -244,7 +244,7 @@ class MlModelsClient:
         Parameters
         ----------
         id : str
-            A UUID string identifying this ml model.
+            Either the ML Model's ID or its unique name

         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -293,7 +293,7 @@ class MlModelsClient:
         Parameters
         ----------
         id : str
-            A UUID string identifying this ml model.
+            Either the ML Model's ID or its unique name

         display_config : typing.Optional[MlModelDisplayConfigRequest]
             Configuration for how to display the ML Model.
@@ -355,7 +355,7 @@ class MlModelsClient:
         Parameters
         ----------
         id : str
-            A UUID string identifying this ml model.
+            Either the ML Model's ID or its unique name

         display_config : typing.Optional[MlModelDisplayConfigRequest]
             Configuration for how to display the ML Model.
@@ -642,7 +642,7 @@ class AsyncMlModelsClient:
         Parameters
         ----------
         id : str
-            A UUID string identifying this ml model.
+            Either the ML Model's ID or its unique name

         request_options : typing.Optional[RequestOptions]
             Request-specific configuration.
@@ -699,7 +699,7 @@ class AsyncMlModelsClient:
         Parameters
         ----------
         id : str
-            A UUID string identifying this ml model.
+            Either the ML Model's ID or its unique name

         display_config : typing.Optional[MlModelDisplayConfigRequest]
             Configuration for how to display the ML Model.
@@ -769,7 +769,7 @@ class AsyncMlModelsClient:
         Parameters
         ----------
         id : str
-            A UUID string identifying this ml model.
+            Either the ML Model's ID or its unique name

         display_config : typing.Optional[MlModelDisplayConfigRequest]
             Configuration for how to display the ML Model.
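The docstring change above reflects that the `id` path parameter of the ML Models endpoints now accepts either the model's UUID or its unique name. A minimal sketch of what that looks like from the SDK side follows; the `retrieve` method name and the identifier values are assumptions based on typical Fern-generated clients, not taken from this diff.

# Hedged sketch, not from this diff: calling an ML Models endpoint with either
# identifier form. `retrieve` and both identifier values are assumptions.
from vellum.client import Vellum

client = Vellum(api_key="YOUR_VELLUM_API_KEY")

# Previously documented as UUID-only; a UUID still works (hypothetical value).
model_by_id = client.ml_models.retrieve(id="11111111-2222-3333-4444-555555555555")

# Per the updated docstring, the model's unique name is now accepted as well
# (hypothetical name).
model_by_name = client.ml_models.retrieve(id="gpt-4o-mini")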
vellum/types/__init__.py CHANGED
@@ -279,9 +279,11 @@ from .ml_model_tokenizer_config_request import (
 )
 from .ml_model_usage import MlModelUsage
 from .named_scenario_input_chat_history_variable_value_request import NamedScenarioInputChatHistoryVariableValueRequest
+from .named_scenario_input_json_variable_value_request import NamedScenarioInputJsonVariableValueRequest
 from .named_scenario_input_request import (
     NamedScenarioInputRequest,
     NamedScenarioInputRequest_ChatHistory,
+    NamedScenarioInputRequest_Json,
     NamedScenarioInputRequest_String,
 )
 from .named_scenario_input_string_variable_value_request import NamedScenarioInputStringVariableValueRequest
@@ -454,8 +456,9 @@ from .rejected_workflow_node_result_event import RejectedWorkflowNodeResultEvent
 from .release_tag_source import ReleaseTagSource
 from .replace_test_suite_test_case_request import ReplaceTestSuiteTestCaseRequest
 from .sandbox_scenario import SandboxScenario
-from .scenario_input import ScenarioInput, ScenarioInput_ChatHistory, ScenarioInput_String
+from .scenario_input import ScenarioInput, ScenarioInput_ChatHistory, ScenarioInput_Json, ScenarioInput_String
 from .scenario_input_chat_history_variable_value import ScenarioInputChatHistoryVariableValue
+from .scenario_input_json_variable_value import ScenarioInputJsonVariableValue
 from .scenario_input_string_variable_value import ScenarioInputStringVariableValue
 from .search_filters_request import SearchFiltersRequest
 from .search_node_result import SearchNodeResult
@@ -987,8 +990,10 @@ __all__ = [
     "MlModelTokenizerConfig_Tiktoken",
     "MlModelUsage",
     "NamedScenarioInputChatHistoryVariableValueRequest",
+    "NamedScenarioInputJsonVariableValueRequest",
     "NamedScenarioInputRequest",
     "NamedScenarioInputRequest_ChatHistory",
+    "NamedScenarioInputRequest_Json",
     "NamedScenarioInputRequest_String",
     "NamedScenarioInputStringVariableValueRequest",
     "NamedTestCaseArrayVariableValue",
@@ -1146,8 +1151,10 @@ __all__ = [
     "SandboxScenario",
     "ScenarioInput",
     "ScenarioInputChatHistoryVariableValue",
+    "ScenarioInputJsonVariableValue",
     "ScenarioInputStringVariableValue",
     "ScenarioInput_ChatHistory",
+    "ScenarioInput_Json",
     "ScenarioInput_String",
     "SearchFiltersRequest",
     "SearchNodeResult",
vellum/types/named_scenario_input_json_variable_value_request.py ADDED
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+
+
+class NamedScenarioInputJsonVariableValueRequest(pydantic_v1.BaseModel):
+    """
+    Named Prompt Sandbox Scenario input value that is of type JSON
+    """
+
+    value: typing.Any
+    name: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
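A minimal usage sketch of the model added above: constructing a JSON-typed named scenario input request and serializing it through the overridden dict() helper. The field names come from the generated model; the values are illustrative only.

# Hedged sketch: field names taken from the generated model above,
# field values invented for illustration.
from vellum.types import NamedScenarioInputJsonVariableValueRequest

json_request = NamedScenarioInputJsonVariableValueRequest(
    name="weather_payload",
    value={"city": "Seattle", "conditions": ["rain", "wind"]},
)

# dict()/json() apply the by_alias/exclude_unset defaults defined in the model.
print(json_request.dict())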
vellum/types/named_scenario_input_request.py CHANGED
@@ -34,6 +34,30 @@ class NamedScenarioInputRequest_String(pydantic_v1.BaseModel):
         json_encoders = {dt.datetime: serialize_datetime}


+class NamedScenarioInputRequest_Json(pydantic_v1.BaseModel):
+    value: typing.Any
+    name: str
+    type: typing.Literal["JSON"] = "JSON"
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
+
+
 class NamedScenarioInputRequest_ChatHistory(pydantic_v1.BaseModel):
     value: typing.Optional[typing.List[ChatMessageRequest]] = None
     name: str
@@ -58,4 +82,6 @@ class NamedScenarioInputRequest_ChatHistory(pydantic_v1.BaseModel):
         json_encoders = {dt.datetime: serialize_datetime}


-NamedScenarioInputRequest = typing.Union[NamedScenarioInputRequest_String, NamedScenarioInputRequest_ChatHistory]
+NamedScenarioInputRequest = typing.Union[
+    NamedScenarioInputRequest_String, NamedScenarioInputRequest_Json, NamedScenarioInputRequest_ChatHistory
+]
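Because the widened union above is discriminated by the `type` literal, the new JSON variant can be passed anywhere a NamedScenarioInputRequest is expected. A brief sketch with illustrative values:

# Hedged sketch: constructing the new JSON union member. The annotation shows
# it is accepted wherever NamedScenarioInputRequest is expected; the name and
# value here are invented for illustration.
from vellum.types import NamedScenarioInputRequest, NamedScenarioInputRequest_Json

scenario_input: NamedScenarioInputRequest = NamedScenarioInputRequest_Json(
    name="generation_config",
    value={"retries": 3, "temperature": 0.2},
)

# The serialized form carries the "JSON" discriminator from the type literal.
print(scenario_input.dict())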
vellum/types/scenario_input.py CHANGED
@@ -34,6 +34,30 @@ class ScenarioInput_String(pydantic_v1.BaseModel):
         json_encoders = {dt.datetime: serialize_datetime}


+class ScenarioInput_Json(pydantic_v1.BaseModel):
+    value: typing.Any
+    input_variable_id: str
+    type: typing.Literal["JSON"] = "JSON"
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
+
+
 class ScenarioInput_ChatHistory(pydantic_v1.BaseModel):
     value: typing.Optional[typing.List[ChatMessage]] = None
     input_variable_id: str
@@ -58,4 +82,4 @@ class ScenarioInput_ChatHistory(pydantic_v1.BaseModel):
         json_encoders = {dt.datetime: serialize_datetime}


-ScenarioInput = typing.Union[ScenarioInput_String, ScenarioInput_ChatHistory]
+ScenarioInput = typing.Union[ScenarioInput_String, ScenarioInput_Json, ScenarioInput_ChatHistory]
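On the read side, the widened ScenarioInput union means JSON-typed scenario inputs can be resolved alongside the existing string and chat-history variants. A sketch using the SDK's pydantic v1 shim; parse_obj_as is standard pydantic v1, and the raw payload below is invented for illustration rather than taken from an actual API response.

# Hedged sketch: resolving a raw scenario input against the widened union.
# The payload shape mirrors the ScenarioInput_Json fields above; the actual
# API response format is assumed, not taken from this diff.
from vellum.core.pydantic_utilities import pydantic_v1
from vellum.types import ScenarioInput, ScenarioInput_Json

raw = {
    "type": "JSON",
    "input_variable_id": "00000000-0000-0000-0000-000000000000",  # illustrative ID
    "value": {"threshold": 0.75, "labels": ["spam", "not_spam"]},
}

parsed = pydantic_v1.parse_obj_as(ScenarioInput, raw)
assert isinstance(parsed, ScenarioInput_Json)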
vellum/types/scenario_input_json_variable_value.py ADDED
@@ -0,0 +1,34 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import datetime as dt
+import typing
+
+from ..core.datetime_utils import serialize_datetime
+from ..core.pydantic_utilities import deep_union_pydantic_dicts, pydantic_v1
+
+
+class ScenarioInputJsonVariableValue(pydantic_v1.BaseModel):
+    """
+    Prompt Sandbox Scenario input value that is of type JSON
+    """
+
+    value: typing.Any
+    input_variable_id: str
+
+    def json(self, **kwargs: typing.Any) -> str:
+        kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        return super().json(**kwargs_with_defaults)
+
+    def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
+        kwargs_with_defaults_exclude_unset: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
+        kwargs_with_defaults_exclude_none: typing.Any = {"by_alias": True, "exclude_none": True, **kwargs}
+
+        return deep_union_pydantic_dicts(
+            super().dict(**kwargs_with_defaults_exclude_unset), super().dict(**kwargs_with_defaults_exclude_none)
+        )
+
+    class Config:
+        frozen = True
+        smart_union = True
+        extra = pydantic_v1.Extra.allow
+        json_encoders = {dt.datetime: serialize_datetime}
{vellum_ai-0.7.6.dist-info → vellum_ai-0.7.7.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 0.7.6
+Version: 0.7.7
 Summary:
 License: MIT
 Requires-Python: >=3.8,<4.0
{vellum_ai-0.7.6.dist-info → vellum_ai-0.7.7.dist-info}/RECORD CHANGED
@@ -1,8 +1,8 @@
-vellum/__init__.py,sha256=eqkGPne9ahxtbVtLlx7i1BxCA-RBaWg3qGRTCLw6lTA,49657
+vellum/__init__.py,sha256=Y08IiuMhi07kjouldi6dHJvZRt8cwdEVSKrG0N9BzGM,49953
 vellum/client.py,sha256=FEelOptuh8ylBnqSznSXvIUj2LWGTEPDTPrK5sgQkSE,83651
 vellum/core/__init__.py,sha256=UFXpYzcGxWQUucU1TkjOQ9mGWN3A5JohluOIWVYKU4I,973
 vellum/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/core/client_wrapper.py,sha256=Z9KoCufJbg73Hf7nVHFk9DnB5JNBULg1oQsf7_0BQbE,1873
+vellum/core/client_wrapper.py,sha256=kfr5YYYdHYEJSOwIjHKQZK3Nqy8Go2DfxcCLmF2Mxz0,1873
 vellum/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/core/file.py,sha256=sy1RUGZ3aJYuw998bZytxxo6QdgKmlnlgBaMvwEKCGg,1480
 vellum/core/http_client.py,sha256=46CyqS5Y8MwWTclAXnb1z5-ODJfwfHYbyhvjhb7RY1c,18753
@@ -41,7 +41,7 @@ vellum/resources/documents/client.py,sha256=lrRR9wp5nnMnENycYm-FrWwKIy7tKrfpHQ5L
 vellum/resources/folder_entities/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/resources/folder_entities/client.py,sha256=EZ_RjrB87rPLoaqNC44Dkrhp7aWEqEqI2pm5bekMqLw,4359
 vellum/resources/ml_models/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-vellum/resources/ml_models/client.py,sha256=1pXtLSTr3J7Jdu5INAGqyRFN8FEEZZ2DZ54k02HXR48,26644
+vellum/resources/ml_models/client.py,sha256=kInb7AgmXqcO9Fdvy2u_pnbeg4aUlDVD-3dCTMUxn3o,26662
 vellum/resources/sandboxes/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/resources/sandboxes/client.py,sha256=Vn80xkXWKZ8llBQSSoSqs9NU62mP1BBpNxgRBpDdLy8,15204
 vellum/resources/test_suite_runs/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -63,7 +63,7 @@ vellum/terraform/document_index/__init__.py,sha256=GY4Sn8X8-TgNiW_2Rph2uvY6tmJ6q
 vellum/terraform/provider/__init__.py,sha256=YYQLWWJDslcjc1eN0N719A3wqMdbNR2c3WuqGtX1U_I,12684
 vellum/terraform/py.typed,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 vellum/terraform/versions.json,sha256=L-eKdEx1tp0hyZY8TGQ0Gy1a5R7q9hHAMLrCXICurZo,56
-vellum/types/__init__.py,sha256=OymATkVwsBVnU8eA1JZ9okU8N07VwDMUsWWl9rdGY5s,66253
+vellum/types/__init__.py,sha256=74SSEdsGGOEXMsJZJwqRExTDFUxadMjmPdS_YpK5710,66645
 vellum/types/add_openai_api_key_enum.py,sha256=GB7sLK_Ou7-Xn73sKJHUo6Gx3TjyhU7uJvWZAg4UeaI,92
 vellum/types/api_node_result.py,sha256=H25_pCtZ9xT6GTbQG1Gae89yjK-45yKipSLNqfc2PTk,1246
 vellum/types/api_node_result_data.py,sha256=1MIuC_rN_4ul7PQVWPbQySrLKzwwVb5sjXO6G6v13z4,1424
@@ -219,7 +219,8 @@ vellum/types/ml_model_tokenizer_config.py,sha256=4E01CgGUU5FlHja7wnMio8pSpfnHStH
 vellum/types/ml_model_tokenizer_config_request.py,sha256=mOLzJIidqLnkjg3Sj1JvHhX-e7ATK_tGuZt7oZ5W-Hc,2365
 vellum/types/ml_model_usage.py,sha256=IFbXxMXf-4bkGuOzPIgfm0acZ4lVRI9C6uQpeO4_0o8,1349
 vellum/types/named_scenario_input_chat_history_variable_value_request.py,sha256=CfGCHcFOEWL3UV-VjA6pYyExnpAabrlQ44c6LxdhjyA,1361
-vellum/types/named_scenario_input_request.py,sha256=-GRkDZGwkhlaIwDcTenP_-u4fH79bRE0rRG5Ddi9bSs,2452
+vellum/types/named_scenario_input_json_variable_value_request.py,sha256=r6jsE_wRPQHudeTaKDrSlKYVH2Nu4F6PlSaKx21o4GM,1248
+vellum/types/named_scenario_input_request.py,sha256=KGuT0bFtt3rmBJHmWkTlD8zKqRtmZKfP865aHifi6XM,3452
 vellum/types/named_scenario_input_string_variable_value_request.py,sha256=tlJ8G8ru1BQHxdjfBpMiwKhQOHOw3mROuyJzG3KcGow,1269
 vellum/types/named_test_case_array_variable_value.py,sha256=reOwe29S23QepGANR5yGWpKpUUQ_SUDPg5TCBBEnEDA,1323
 vellum/types/named_test_case_array_variable_value_request.py,sha256=-_F7ymtx32IfSCSgoHjHXDE1kcloeQvZdy8lhRY1aCI,1352
@@ -319,8 +320,9 @@ vellum/types/rejected_workflow_node_result_event.py,sha256=n0yp5qdEyTEsnSVRAWPFU
 vellum/types/release_tag_source.py,sha256=YavosOXZ976yfXTNWRTZwh2HhRiYmSDk0bQCkl-jCoQ,158
 vellum/types/replace_test_suite_test_case_request.py,sha256=70JYolE2hwB52LU5gpkEgF3bKC-epaeDeuJrgPzJDmE,2209
 vellum/types/sandbox_scenario.py,sha256=1vp9eQhMMPpiHfSWoAtTUpDmx0jy4VMWW9ZzHlaj7Yk,1407
-vellum/types/scenario_input.py,sha256=o98RTH7IDbXBCBKF2JKiCPvUkkloYDhWJjvGxMgQVKA,2396
+vellum/types/scenario_input.py,sha256=fMFI-FPKq6WBA1I_h_XrheeVN27JoqU_YB5vCGlD-ho,3379
 vellum/types/scenario_input_chat_history_variable_value.py,sha256=l63tBZzYwgtcBVB_-Iph-ylRRQMV7FkD7fZVbadLk2c,1334
+vellum/types/scenario_input_json_variable_value.py,sha256=SXZ7stmJ2DmXcYdmiwTkvKahP87EiCcau3QoRTn-q9I,1243
 vellum/types/scenario_input_string_variable_value.py,sha256=QEeWZyJWvEeEParxhVs4n5sJ6fztkz4uA1CVqnIrVEw,1264
 vellum/types/search_filters_request.py,sha256=3k90Kq2KWu8p9a-CYtkcybS7ODNosNLgBIk-ZL0uQkM,1481
 vellum/types/search_node_result.py,sha256=Yz9S17nCtOTQDkhWM_hPUOHG8ZttG1TQukzSVjnWGH4,1260
@@ -494,7 +496,7 @@ vellum/types/workflow_result_event_output_data_search_results.py,sha256=_C4ueKK8
 vellum/types/workflow_result_event_output_data_string.py,sha256=AAWHZT3X9HOIRA3UuIqw0VpfSGwGemsJM71WDNbWYTc,1745
 vellum/types/workflow_stream_event.py,sha256=5K-Mtn9fvJDq8m5nhURDbChL01PXIiuIZDkfAC1d6fU,2610
 vellum/version.py,sha256=neLt8HBHHUtDF9M5fsyUzHT-pKooEPvceaLDqqIGb0s,77
-vellum_ai-0.7.6.dist-info/LICENSE,sha256=CcaljEIoOBaU-wItPH4PmM_mDCGpyuUY0Er1BGu5Ti8,1073
-vellum_ai-0.7.6.dist-info/METADATA,sha256=Px_DEw78UFucYoZd9Z3V9a8pjtuPXhl8E2AcMkjRNvc,4398
-vellum_ai-0.7.6.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-vellum_ai-0.7.6.dist-info/RECORD,,
+vellum_ai-0.7.7.dist-info/LICENSE,sha256=CcaljEIoOBaU-wItPH4PmM_mDCGpyuUY0Er1BGu5Ti8,1073
+vellum_ai-0.7.7.dist-info/METADATA,sha256=79Tnc1WEpabCIADsOlXGjR7EKNmvC7oe0OPjzrpSYdU,4398
+vellum_ai-0.7.7.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+vellum_ai-0.7.7.dist-info/RECORD,,