vellum-ai 0.7.7__py3-none-any.whl → 0.7.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ class BaseClientWrapper:
  headers: typing.Dict[str, str] = {
  "X-Fern-Language": "Python",
  "X-Fern-SDK-Name": "vellum-ai",
- "X-Fern-SDK-Version": "0.7.7",
+ "X-Fern-SDK-Version": "0.7.8",
  }
  headers["X_API_KEY"] = self.api_key
  return headers
@@ -85,9 +85,9 @@ class MlModelsClient:
  *,
  name: str,
  family: MlModelFamily,
+ hosted_by: HostedByEnum,
+ developed_by: MlModelDeveloper,
  exec_config: MlModelExecConfigRequest,
- hosted_by: typing.Optional[HostedByEnum] = OMIT,
- developed_by: typing.Optional[MlModelDeveloper] = OMIT,
  parameter_config: typing.Optional[MlModelParameterConfigRequest] = OMIT,
  display_config: typing.Optional[MlModelDisplayConfigRequest] = OMIT,
  visibility: typing.Optional[VisibilityEnum] = OMIT,
@@ -125,10 +125,7 @@ class MlModelsClient:
  * `YI` - Yi
  * `ZEPHYR` - Zephyr
 
- exec_config : MlModelExecConfigRequest
- Configuration for how to execute the ML Model.
-
- hosted_by : typing.Optional[HostedByEnum]
+ hosted_by : HostedByEnum
  The organization hosting the ML Model.
 
  * `ANTHROPIC` - ANTHROPIC
@@ -149,7 +146,7 @@ class MlModelsClient:
  * `PYQ` - PYQ
  * `REPLICATE` - REPLICATE
 
- developed_by : typing.Optional[MlModelDeveloper]
+ developed_by : MlModelDeveloper
  The organization that developed the ML Model.
 
  * `01_AI` - 01_AI
@@ -171,6 +168,9 @@ class MlModelsClient:
  * `TII` - TII
  * `WIZARDLM` - WIZARDLM
 
+ exec_config : MlModelExecConfigRequest
+ Configuration for how to execute the ML Model.
+
  parameter_config : typing.Optional[MlModelParameterConfigRequest]
  Configuration for the ML Model's parameters.
 
@@ -204,6 +204,8 @@ class MlModelsClient:
  client.ml_models.create(
  name="name",
  family="CAPYBARA",
+ hosted_by="ANTHROPIC",
+ developed_by="01_AI",
  exec_config=MlModelExecConfigRequest(
  model_identifier="model_identifier",
  base_url="base_url",
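
With this release, hosted_by and developed_by become required keyword arguments of ml_models.create instead of optional values defaulting to OMIT, so existing callers that omitted them will now fail validation. A minimal sketch of an updated synchronous call follows; the Vellum and MlModelExecConfigRequest import paths, the api_key constructor argument, and all string values are placeholders and assumptions based on the SDK's generated examples, not part of this diff:

    # Sketch of a create() call against 0.7.8 (sync client).
    # Assumption: Vellum and MlModelExecConfigRequest are importable as shown;
    # every string value below is a placeholder.
    from vellum import MlModelExecConfigRequest
    from vellum.client import Vellum

    client = Vellum(api_key="YOUR_API_KEY")

    client.ml_models.create(
        name="name",
        family="CAPYBARA",
        hosted_by="ANTHROPIC",    # required as of 0.7.8
        developed_by="01_AI",     # required as of 0.7.8
        exec_config=MlModelExecConfigRequest(
            model_identifier="model_identifier",
            base_url="base_url",
            metadata={},   # placeholder for the required dict
            features=[],   # placeholder for the required list
        ),
    )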
@@ -475,9 +477,9 @@ class AsyncMlModelsClient:
  *,
  name: str,
  family: MlModelFamily,
+ hosted_by: HostedByEnum,
+ developed_by: MlModelDeveloper,
  exec_config: MlModelExecConfigRequest,
- hosted_by: typing.Optional[HostedByEnum] = OMIT,
- developed_by: typing.Optional[MlModelDeveloper] = OMIT,
  parameter_config: typing.Optional[MlModelParameterConfigRequest] = OMIT,
  display_config: typing.Optional[MlModelDisplayConfigRequest] = OMIT,
  visibility: typing.Optional[VisibilityEnum] = OMIT,
@@ -515,10 +517,7 @@ class AsyncMlModelsClient:
  * `YI` - Yi
  * `ZEPHYR` - Zephyr
 
- exec_config : MlModelExecConfigRequest
- Configuration for how to execute the ML Model.
-
- hosted_by : typing.Optional[HostedByEnum]
+ hosted_by : HostedByEnum
  The organization hosting the ML Model.
 
  * `ANTHROPIC` - ANTHROPIC
@@ -539,7 +538,7 @@ class AsyncMlModelsClient:
  * `PYQ` - PYQ
  * `REPLICATE` - REPLICATE
 
- developed_by : typing.Optional[MlModelDeveloper]
+ developed_by : MlModelDeveloper
  The organization that developed the ML Model.
 
  * `01_AI` - 01_AI
@@ -561,6 +560,9 @@ class AsyncMlModelsClient:
  * `TII` - TII
  * `WIZARDLM` - WIZARDLM
 
+ exec_config : MlModelExecConfigRequest
+ Configuration for how to execute the ML Model.
+
  parameter_config : typing.Optional[MlModelParameterConfigRequest]
  Configuration for the ML Model's parameters.
 
@@ -599,6 +601,8 @@ class AsyncMlModelsClient:
  await client.ml_models.create(
  name="name",
  family="CAPYBARA",
+ hosted_by="ANTHROPIC",
+ developed_by="01_AI",
  exec_config=MlModelExecConfigRequest(
  model_identifier="model_identifier",
  base_url="base_url",
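
The async client's create signature changes in the same way, as the AsyncMlModelsClient hunks above show. A hedged sketch of the awaited call; AsyncVellum as the async client class name and the placeholder values are assumptions, not taken from this diff:

    # Sketch of the equivalent async call against 0.7.8.
    # Assumption: AsyncVellum is the async counterpart in vellum.client;
    # every string value below is a placeholder.
    import asyncio

    from vellum import MlModelExecConfigRequest
    from vellum.client import AsyncVellum

    async def main() -> None:
        client = AsyncVellum(api_key="YOUR_API_KEY")
        await client.ml_models.create(
            name="name",
            family="CAPYBARA",
            hosted_by="ANTHROPIC",
            developed_by="01_AI",
            exec_config=MlModelExecConfigRequest(
                model_identifier="model_identifier",
                base_url="base_url",
                metadata={},
                features=[],
            ),
        )

    asyncio.run(main())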
@@ -16,7 +16,6 @@ class MlModelExecConfig(pydantic_v1.BaseModel):
  base_url: str
  metadata: typing.Dict[str, typing.Any]
  features: typing.List[MlModelFeature]
- force_system_credentials: typing.Optional[bool] = None
  tokenizer_config: typing.Optional[MlModelTokenizerConfig] = None
  request_config: typing.Optional[MlModelRequestConfig] = None
  response_config: typing.Optional[MlModelResponseConfig] = None
@@ -16,7 +16,6 @@ class MlModelExecConfigRequest(pydantic_v1.BaseModel):
  base_url: str
  metadata: typing.Dict[str, typing.Any]
  features: typing.List[MlModelFeature]
- force_system_credentials: typing.Optional[bool] = None
  tokenizer_config: typing.Optional[MlModelTokenizerConfigRequest] = None
  request_config: typing.Optional[MlModelRequestConfigRequest] = None
  response_config: typing.Optional[MlModelResponseConfigRequest] = None
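
Both MlModelExecConfig and MlModelExecConfigRequest drop the force_system_credentials field in this release, so code that still sets it should stop passing it. A hedged sketch of constructing the request model under 0.7.8; the placeholder values are assumptions:

    # Sketch: building MlModelExecConfigRequest without the removed field.
    from vellum import MlModelExecConfigRequest

    exec_config = MlModelExecConfigRequest(
        model_identifier="model_identifier",
        base_url="base_url",
        metadata={},
        features=[],
        # force_system_credentials=True,  # no longer a declared field in 0.7.8
    )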
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: vellum-ai
- Version: 0.7.7
+ Version: 0.7.8
  Summary:
  License: MIT
  Requires-Python: >=3.8,<4.0
@@ -2,7 +2,7 @@ vellum/__init__.py,sha256=Y08IiuMhi07kjouldi6dHJvZRt8cwdEVSKrG0N9BzGM,49953
  vellum/client.py,sha256=FEelOptuh8ylBnqSznSXvIUj2LWGTEPDTPrK5sgQkSE,83651
  vellum/core/__init__.py,sha256=UFXpYzcGxWQUucU1TkjOQ9mGWN3A5JohluOIWVYKU4I,973
  vellum/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
- vellum/core/client_wrapper.py,sha256=kfr5YYYdHYEJSOwIjHKQZK3Nqy8Go2DfxcCLmF2Mxz0,1873
+ vellum/core/client_wrapper.py,sha256=zAmOvT3hIgcE7oJ1_UMRHNE5WUsLBY-bplg8t5xrwlo,1873
  vellum/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
  vellum/core/file.py,sha256=sy1RUGZ3aJYuw998bZytxxo6QdgKmlnlgBaMvwEKCGg,1480
  vellum/core/http_client.py,sha256=46CyqS5Y8MwWTclAXnb1z5-ODJfwfHYbyhvjhb7RY1c,18753
@@ -41,7 +41,7 @@ vellum/resources/documents/client.py,sha256=lrRR9wp5nnMnENycYm-FrWwKIy7tKrfpHQ5L
  vellum/resources/folder_entities/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  vellum/resources/folder_entities/client.py,sha256=EZ_RjrB87rPLoaqNC44Dkrhp7aWEqEqI2pm5bekMqLw,4359
  vellum/resources/ml_models/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
- vellum/resources/ml_models/client.py,sha256=kInb7AgmXqcO9Fdvy2u_pnbeg4aUlDVD-3dCTMUxn3o,26662
+ vellum/resources/ml_models/client.py,sha256=ygrjdLSoUBJIHjT4AtofxcWezx-E1pO5-tgpt4XvwvI,26644
  vellum/resources/sandboxes/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
  vellum/resources/sandboxes/client.py,sha256=Vn80xkXWKZ8llBQSSoSqs9NU62mP1BBpNxgRBpDdLy8,15204
  vellum/resources/test_suite_runs/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -200,8 +200,8 @@ vellum/types/ml_model_display_config_labelled.py,sha256=SleUaRCUTc3nFAqU6R8UOOzR
  vellum/types/ml_model_display_config_request.py,sha256=WfPdaLzX0X0w4V-3hQ1A8JOuej1mqsevEzkj8pW-ry4,1242
  vellum/types/ml_model_display_tag.py,sha256=mvxWDmFB7nxMiO9f9XEueWoXL3YmkPfqvroDVzVXNyE,229
  vellum/types/ml_model_display_tag_enum_value_label.py,sha256=r8_Nxz3TOBcEhWmVVgCbrI0h2x85Q8RsGA-TLZOBBe0,1213
- vellum/types/ml_model_exec_config.py,sha256=dveJCP_BOy4VcNP9Uv15ISUTyRA2Z5DRauDWT2weSos,1717
- vellum/types/ml_model_exec_config_request.py,sha256=ul1FDz4JS-G94E9EP4LCFGi3c-Us9kEReKlXYOfKu9M,1790
+ vellum/types/ml_model_exec_config.py,sha256=bZiGQ2XQ2M67eJHzOM9wZIE3oq2Mee9B-9InNj5-Ew4,1658
+ vellum/types/ml_model_exec_config_request.py,sha256=Fy5nGCj7zXCnB6N1fs0cmdRCRV_hvYYNzS2NMXH47jE,1731
  vellum/types/ml_model_family.py,sha256=96a6cigZeoGoM4VLvwvSmIKnEVBAI8RsW7xaS3EZZDI,513
  vellum/types/ml_model_family_enum_value_label.py,sha256=ldXOZN0ZV277Q9qSjli3kJJ47ozzDe50d1DanB_l9v4,1196
  vellum/types/ml_model_feature.py,sha256=s3DTCKk5m8MBdVV4-gHGe3Oj4B38fS6I1LLj8pps4Lo,441
@@ -496,7 +496,7 @@ vellum/types/workflow_result_event_output_data_search_results.py,sha256=_C4ueKK8
  vellum/types/workflow_result_event_output_data_string.py,sha256=AAWHZT3X9HOIRA3UuIqw0VpfSGwGemsJM71WDNbWYTc,1745
  vellum/types/workflow_stream_event.py,sha256=5K-Mtn9fvJDq8m5nhURDbChL01PXIiuIZDkfAC1d6fU,2610
  vellum/version.py,sha256=neLt8HBHHUtDF9M5fsyUzHT-pKooEPvceaLDqqIGb0s,77
- vellum_ai-0.7.7.dist-info/LICENSE,sha256=CcaljEIoOBaU-wItPH4PmM_mDCGpyuUY0Er1BGu5Ti8,1073
- vellum_ai-0.7.7.dist-info/METADATA,sha256=79Tnc1WEpabCIADsOlXGjR7EKNmvC7oe0OPjzrpSYdU,4398
- vellum_ai-0.7.7.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- vellum_ai-0.7.7.dist-info/RECORD,,
+ vellum_ai-0.7.8.dist-info/LICENSE,sha256=CcaljEIoOBaU-wItPH4PmM_mDCGpyuUY0Er1BGu5Ti8,1073
+ vellum_ai-0.7.8.dist-info/METADATA,sha256=R5nnOWBhtOMhaXi9kk1yndyxQnvBrb-nkVdWAnHmrEs,4398
+ vellum_ai-0.7.8.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+ vellum_ai-0.7.8.dist-info/RECORD,,