vellum-ai 0.0.30__py3-none-any.whl → 0.0.31__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
vellum/client.py CHANGED
@@ -109,12 +109,14 @@ class Vellum:
                     continue
                 yield pydantic.parse_obj_as(WorkflowStreamEvent, json.loads(_text)) # type: ignore
             return
+        _response.read()
+        if _response.status_code == 400:
+            raise BadRequestError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         if _response.status_code == 404:
             raise NotFoundError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         if _response.status_code == 500:
             raise InternalServerError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         try:
-            _response.read()
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -218,6 +220,7 @@ class Vellum:
                     continue
                 yield pydantic.parse_obj_as(GenerateStreamResponse, json.loads(_text)) # type: ignore
             return
+        _response.read()
         if _response.status_code == 400:
             raise BadRequestError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         if _response.status_code == 403:
@@ -227,7 +230,6 @@ class Vellum:
         if _response.status_code == 500:
             raise InternalServerError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         try:
-            _response.read()
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -405,12 +407,14 @@ class AsyncVellum:
                     continue
                 yield pydantic.parse_obj_as(WorkflowStreamEvent, json.loads(_text)) # type: ignore
             return
+        await _response.aread()
+        if _response.status_code == 400:
+            raise BadRequestError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         if _response.status_code == 404:
             raise NotFoundError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         if _response.status_code == 500:
             raise InternalServerError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         try:
-            await _response.aread()
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
@@ -514,6 +518,7 @@ class AsyncVellum:
                     continue
                 yield pydantic.parse_obj_as(GenerateStreamResponse, json.loads(_text)) # type: ignore
             return
+        await _response.aread()
         if _response.status_code == 400:
             raise BadRequestError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         if _response.status_code == 403:
@@ -523,7 +528,6 @@ class AsyncVellum:
         if _response.status_code == 500:
             raise InternalServerError(pydantic.parse_obj_as(typing.Any, _response.json())) # type: ignore
         try:
-            await _response.aread()
             _response_json = _response.json()
         except JSONDecodeError:
             raise ApiError(status_code=_response.status_code, body=_response.text)
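In both the sync and async streaming methods, the body of a non-2xx response is now read before the status-code checks rather than inside the later try block, and the workflow-stream endpoints gain a 400 → BadRequestError branch. With httpx, a streamed response must be read before .json() or .text can be accessed, so the earlier placement lets every error branch parse the body. Below is a minimal sketch of the same read-before-parse pattern using httpx directly; the URL, payload, and exception types are placeholders, not Vellum's actual endpoint or error classes.

    import json

    import httpx

    def stream_events(url: str, payload: dict):
        # Placeholder endpoint; illustrates the read-before-parse pattern only.
        with httpx.Client() as client:
            with client.stream("POST", url, json=payload) as response:
                if 200 <= response.status_code < 300:
                    for line in response.iter_lines():
                        if not line:
                            continue
                        yield json.loads(line)
                    return
                # A streamed httpx response has no loaded body yet; read() must run
                # before .json() or .text, otherwise httpx raises ResponseNotRead.
                response.read()
                if response.status_code == 400:
                    raise ValueError(response.json())
                raise RuntimeError(f"HTTP {response.status_code}: {response.text}")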
vellum/core/client_wrapper.py CHANGED
@@ -10,7 +10,11 @@ class BaseClientWrapper:
         self.api_key = api_key
 
     def get_headers(self) -> typing.Dict[str, str]:
-        headers: typing.Dict[str, str] = {}
+        headers: typing.Dict[str, str] = {
+            "X-Fern-Language": "Python",
+            "X-Fern-SDK-Name": "vellum-ai",
+            "X-Fern-SDK-Version": "v0.0.31",
+        }
         headers["X_API_KEY"] = self.api_key
         return headers
 
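BaseClientWrapper.get_headers() now attaches Fern SDK metadata headers alongside the API key. The snippet below is an illustrative stand-in (build_headers is not part of the SDK) showing the header set a 0.0.31 client sends with each request.

    from typing import Dict

    def build_headers(api_key: str) -> Dict[str, str]:
        # Mirrors the updated get_headers(): SDK metadata plus the API key header.
        headers: Dict[str, str] = {
            "X-Fern-Language": "Python",
            "X-Fern-SDK-Name": "vellum-ai",
            "X-Fern-SDK-Version": "v0.0.31",
        }
        headers["X_API_KEY"] = api_key
        return headers

    print(build_headers("example-key"))
    # {'X-Fern-Language': 'Python', 'X-Fern-SDK-Name': 'vellum-ai',
    #  'X-Fern-SDK-Version': 'v0.0.31', 'X_API_KEY': 'example-key'}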
vellum/types/terminal_node_chat_history_result.py CHANGED
@@ -11,7 +11,7 @@ from .chat_message import ChatMessage
 
 class TerminalNodeChatHistoryResult(pydantic.BaseModel):
     name: str = pydantic.Field(description="The unique name given to the terminal node that produced this output.")
-    value: typing.List[ChatMessage]
+    value: typing.Optional[typing.List[ChatMessage]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
vellum/types/terminal_node_json_result.py CHANGED
@@ -10,7 +10,7 @@ from ..core.datetime_utils import serialize_datetime
 
 class TerminalNodeJsonResult(pydantic.BaseModel):
     name: str = pydantic.Field(description="The unique name given to the terminal node that produced this output.")
-    value: typing.Dict[str, typing.Any]
+    value: typing.Optional[typing.Dict[str, typing.Any]]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
vellum/types/terminal_node_string_result.py CHANGED
@@ -10,7 +10,7 @@ from ..core.datetime_utils import serialize_datetime
 
 class TerminalNodeStringResult(pydantic.BaseModel):
     name: str = pydantic.Field(description="The unique name given to the terminal node that produced this output.")
-    value: str
+    value: typing.Optional[str]
 
     def json(self, **kwargs: typing.Any) -> str:
         kwargs_with_defaults: typing.Any = {"by_alias": True, "exclude_unset": True, **kwargs}
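All three terminal node result models now declare value as Optional, so code that consumes workflow results should tolerate a missing value rather than assume one is always present. A hedged usage sketch follows; the field values are made up, only the import path and model name come from the package.

    from vellum.types.terminal_node_string_result import TerminalNodeStringResult

    # In 0.0.31 a result may arrive without a value; validation no longer rejects None.
    result = TerminalNodeStringResult(name="final-output", value=None)

    if result.value is None:
        print(f"Terminal node {result.name!r} produced no value")
    else:
        print(result.value.upper())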
vellum_ai-0.0.30.dist-info/METADATA → vellum_ai-0.0.31.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 0.0.30
+Version: 0.0.31
 Summary:
 Requires-Python: >=3.7,<4.0
 Classifier: Programming Language :: Python :: 3
vellum_ai-0.0.30.dist-info/RECORD → vellum_ai-0.0.31.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
 vellum/__init__.py,sha256=HrSVslaxHhe2HO6epFwF49_XWDS4HhbfNRrSNVqZdss,9127
-vellum/client.py,sha256=Kxyp8ZazvSbjIEEoBAVCDJeqIHCfW4OMa5vZl84cRYQ,32325
+vellum/client.py,sha256=FXZCbMfR3jgPMXAdobRkPpInZN1dz4kOjKPgRsyvSEQ,32613
 vellum/core/__init__.py,sha256=QJS3CJ2TYP2E1Tge0CS6Z7r8LTNzJHQVX1hD3558eP0,519
 vellum/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/core/client_wrapper.py,sha256=wNp6eZVx2wfl0CBL2eRja6VzGPjd-VH0F27tkE1YSGk,758
+vellum/core/client_wrapper.py,sha256=svrxCUFTlYC-vgNq0OcVMPv2gWSuK7bbuGWyWJ2Y7f8,897
 vellum/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/core/jsonable_encoder.py,sha256=yHrx0C19n1H77G-GanO-HsFyBPVMlsJz7WffsHwXEVI,3710
 vellum/core/remove_none_from_dict.py,sha256=8m91FC3YuVem0Gm9_sXhJ2tGvP33owJJdrqCLEdowGw,330
@@ -123,12 +123,12 @@ vellum/types/search_weights_request.py,sha256=EdAbOOXOWXCq0C3lziyRHTuNIlODw0BECQ
 vellum/types/slim_document.py,sha256=BnS2YA9JLj6dBm21Z-GXnPSgi1T-RWWTaIjx2RKpk84,2820
 vellum/types/submit_completion_actual_request.py,sha256=k5BHx1JUfq8XcLbBZOsbSNzVA3xNVNVFsJkM7tMtXpo,1656
 vellum/types/submit_completion_actuals_error_response.py,sha256=8ZNcGD7l4crYQoKn4LTdMKbRVGHs3v-M3ER9lTkKzmM,772
-vellum/types/terminal_node_chat_history_result.py,sha256=SvlvoLfQFxUNRpaBNAbE6X72X54GrM1ERmaZ373efto,939
-vellum/types/terminal_node_json_result.py,sha256=X9Dzwy25RcBLU_8s2pK5bc-nHWRfoOUslJyKYAj0SsU,898
+vellum/types/terminal_node_chat_history_result.py,sha256=RzqPzyUswQ86pXHgjCPMhmkBhqjQVcTcLipwygXNQqY,956
+vellum/types/terminal_node_json_result.py,sha256=d85-Q-1NJLk8cVq6mWHUgQssYABe0sNHqkAOpV0l7Dk,915
 vellum/types/terminal_node_result.py,sha256=dNdckayDfIQ_LR9IKfklxyFfJErPgtxA9tzsDUXjiPA,833
 vellum/types/terminal_node_result_data.py,sha256=4fsLNH_jkTOIKViXfHavkpE2GhB5NBUJdnVni82tEpk,845
 vellum/types/terminal_node_result_output.py,sha256=QkdMPv5SH8gvtfVd2md9KoAe2inIBMINGul94ZZ-YXE,1107
-vellum/types/terminal_node_string_result.py,sha256=puo54vwX0G-3zDZVRlz5deVA8K-lOQQt0Lu50g8zWmA,875
+vellum/types/terminal_node_string_result.py,sha256=xl19fA2GR705Qx0MFQsz0vgl1XApcJL0njue85gMeCo,892
 vellum/types/test_suite_test_case.py,sha256=_zk_wcESjEWG_fePNPLXsmvpFJmB_bPueacVLVeihNg,1439
 vellum/types/upload_document_error_response.py,sha256=VNSsUbd0TSCKnlBohoKv1h3AJDJeicH-q3kjkXpxKG4,763
 vellum/types/upload_document_response.py,sha256=wi6kn2SWkE5ZlbSQvGXKuQUZInz-2ik1ib7A4kqvkGo,833
@@ -150,6 +150,6 @@ vellum/types/workflow_result_event_output_data_chat_history.py,sha256=viyNe9VF8-
 vellum/types/workflow_result_event_output_data_json.py,sha256=-zLIOCCOurui_vXE_WJlaDO5xfTQx81EGyWdgSurItk,1125
 vellum/types/workflow_result_event_output_data_string.py,sha256=EqTpT1SJxagtuFCxFkJuze64MgUjGuZlzYEOqnR1UGM,1253
 vellum/types/workflow_stream_event.py,sha256=sVpX2OBZq-vVn8A74opHnV_pD83eoICS5_aJ1H9WjxM,819
-vellum_ai-0.0.30.dist-info/METADATA,sha256=FOI3xQkEP5P61_nFQvadknFxElHIlCOPVy6470SVcus,3487
-vellum_ai-0.0.30.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
-vellum_ai-0.0.30.dist-info/RECORD,,
+vellum_ai-0.0.31.dist-info/METADATA,sha256=mM6eMuIL0jR9oUFxNFfOVt7SAygB6X9mkiwbGVe6UbU,3487
+vellum_ai-0.0.31.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+vellum_ai-0.0.31.dist-info/RECORD,,