vellum-ai 0.14.74__py3-none-any.whl → 0.14.75__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ class BaseClientWrapper:
         headers: typing.Dict[str, str] = {
             "X-Fern-Language": "Python",
             "X-Fern-SDK-Name": "vellum-ai",
-            "X-Fern-SDK-Version": "0.14.74",
+            "X-Fern-SDK-Version": "0.14.75",
         }
         headers["X-API-KEY"] = self.api_key
         return headers
@@ -4270,7 +4270,15 @@ client.prompts.push(
 <dl>
 <dd>

-**prompt_variant_id:** `typing.Optional[str]`
+**prompt_variant_id:** `typing.Optional[str]` — If specified, an existing Prompt Variant by the provided ID will be updated. Otherwise, a new Prompt Variant will be created and an ID generated.
+
+</dd>
+</dl>
+
+<dl>
+<dd>
+
+**prompt_variant_label:** `typing.Optional[str]` — If provided, then the created/updated Prompt Variant will have this label.

 </dd>
 </dl>
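
The reference.md entry above documents the new `prompt_variant_label` option on `client.prompts.push`. A minimal usage sketch follows; the `Vellum` client class, the `VELLUM_API_KEY` environment variable, and the pre-built `exec_config` object are illustrative assumptions and are not part of this diff:

    import os

    from vellum import Vellum  # assumed entry point for the SDK client

    client = Vellum(api_key=os.environ["VELLUM_API_KEY"])

    # exec_config is assumed to be a PromptExecConfig built elsewhere.
    response = client.prompts.push(
        exec_config=exec_config,
        prompt_variant_id="existing-variant-id",  # optional: update an existing Prompt Variant (hypothetical ID)
        prompt_variant_label="Baseline v2",       # new in 0.14.75: label for the created/updated variant
    )
    print(response)
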
@@ -110,6 +110,7 @@ class PromptsClient:
         *,
         exec_config: PromptExecConfig,
         prompt_variant_id: typing.Optional[str] = OMIT,
+        prompt_variant_label: typing.Optional[str] = OMIT,
         prompt_sandbox_id: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> PromptPushResponse:
@@ -121,6 +122,10 @@ class PromptsClient:
         exec_config : PromptExecConfig

         prompt_variant_id : typing.Optional[str]
+            If specified, an existing Prompt Variant by the provided ID will be updated. Otherwise, a new Prompt Variant will be created and an ID generated.
+
+        prompt_variant_label : typing.Optional[str]
+            If provided, then the created/updated Prompt Variant will have this label.

         prompt_sandbox_id : typing.Optional[str]

@@ -169,11 +174,12 @@ class PromptsClient:
             base_url=self._client_wrapper.get_environment().default,
             method="POST",
             json={
+                "prompt_variant_id": prompt_variant_id,
+                "prompt_variant_label": prompt_variant_label,
+                "prompt_sandbox_id": prompt_sandbox_id,
                 "exec_config": convert_and_respect_annotation_metadata(
                     object_=exec_config, annotation=PromptExecConfig, direction="write"
                 ),
-                "prompt_variant_id": prompt_variant_id,
-                "prompt_sandbox_id": prompt_sandbox_id,
             },
             headers={
                 "content-type": "application/json",
@@ -316,6 +322,7 @@ class AsyncPromptsClient:
         *,
         exec_config: PromptExecConfig,
         prompt_variant_id: typing.Optional[str] = OMIT,
+        prompt_variant_label: typing.Optional[str] = OMIT,
         prompt_sandbox_id: typing.Optional[str] = OMIT,
         request_options: typing.Optional[RequestOptions] = None,
     ) -> PromptPushResponse:
@@ -327,6 +334,10 @@ class AsyncPromptsClient:
         exec_config : PromptExecConfig

         prompt_variant_id : typing.Optional[str]
+            If specified, an existing Prompt Variant by the provided ID will be updated. Otherwise, a new Prompt Variant will be created and an ID generated.
+
+        prompt_variant_label : typing.Optional[str]
+            If provided, then the created/updated Prompt Variant will have this label.

         prompt_sandbox_id : typing.Optional[str]

@@ -383,11 +394,12 @@ class AsyncPromptsClient:
             base_url=self._client_wrapper.get_environment().default,
             method="POST",
             json={
+                "prompt_variant_id": prompt_variant_id,
+                "prompt_variant_label": prompt_variant_label,
+                "prompt_sandbox_id": prompt_sandbox_id,
                 "exec_config": convert_and_respect_annotation_metadata(
                     object_=exec_config, annotation=PromptExecConfig, direction="write"
                 ),
-                "prompt_variant_id": prompt_variant_id,
-                "prompt_sandbox_id": prompt_sandbox_id,
             },
             headers={
                 "content-type": "application/json",
@@ -457,21 +457,6 @@ class BaseNode(Generic[StateType], ABC, metaclass=BaseNodeMeta):
             resolved_value = resolve_value(descriptor.instance, self.state, path=descriptor.name, memo=inputs)
             setattr(self, descriptor.name, resolved_value)

-        # Resolve descriptors set as defaults to the outputs class
-        def _outputs_post_init(outputs_self: "BaseNode.Outputs", **kwargs: Any) -> None:
-            for node_output_descriptor in self.Outputs:
-                if node_output_descriptor.name in kwargs:
-                    continue
-
-                if isinstance(node_output_descriptor.instance, BaseDescriptor):
-                    setattr(
-                        outputs_self,
-                        node_output_descriptor.name,
-                        node_output_descriptor.instance.resolve(self.state),
-                    )
-
-        setattr(self.Outputs, "_outputs_post_init", _outputs_post_init)
-
         # We only want to store the attributes that were actually set as inputs, not every attribute that exists.
         all_inputs = {}
         for key, value in inputs.items():
@@ -3,6 +3,7 @@ import threading
 import time

 from vellum.workflows.inputs.base import BaseInputs
+from vellum.workflows.nodes import FinalOutputNode
 from vellum.workflows.nodes.bases import BaseNode
 from vellum.workflows.nodes.core.map_node.node import MapNode
 from vellum.workflows.nodes.core.try_node.node import TryNode
@@ -221,3 +222,56 @@ def test_map_node_parallel_execution_with_workflow():
     # AND each item should have run on a different thread
     thread_ids_list = list(thread_ids.values())
     assert len(set(thread_ids_list)) == 3
+
+
+def test_map_node__shared_state_race_condition():
+    processed_items = []
+
+    # GIVEN a templating node that processes items
+    class TemplatingNode(BaseNode):
+        item = MapNode.SubworkflowInputs.item
+
+        class Outputs(BaseOutputs):
+            processed_item: str
+
+        def run(self) -> Outputs:
+            processed_item = f"{self.item}!"
+            return self.Outputs(processed_item=processed_item)
+
+    # AND a final output node
+    class FinalOutput(FinalOutputNode[BaseState, str]):
+        class Outputs(FinalOutputNode.Outputs):
+            value = TemplatingNode.Outputs.processed_item
+
+        def run(self) -> Outputs:
+            outputs = super().run()
+            processed_items.append(outputs.value)
+            return outputs  # type: ignore[return-value]
+
+    # AND a workflow using those nodes
+    class ProcessItemWorkflow(BaseWorkflow[MapNode.SubworkflowInputs, BaseState]):
+        graph = TemplatingNode >> FinalOutput
+
+        class Outputs(BaseWorkflow.Outputs):
+            result = FinalOutput.Outputs.value
+
+    # AND a map node with high concurrency
+    class RaceConditionMapNode(MapNode):
+        items = ["a", "b", "c", "d", "e", "f"]
+        subworkflow = ProcessItemWorkflow
+        max_concurrency = 1
+
+    # WHEN we run the map node multiple times to see pass consistently
+    num_runs = 50
+    for index in range(num_runs):
+        processed_items.clear()
+        node = RaceConditionMapNode(state=BaseState())
+        outputs = list(node.run())
+        final_result = outputs[-1].value
+
+        # THEN the state is unique among each run
+        assert len(set(processed_items)) == 6
+
+        # AND all results should be in correct order
+        expected_result = ["a!", "b!", "c!", "d!", "e!", "f!"]
+        assert final_result == expected_result, f"Failed on run {index}"
@@ -1,3 +1,4 @@
+import inspect
 from typing import TYPE_CHECKING, Any, Generic, Iterator, Set, Tuple, Type, TypeVar, Union, cast
 from typing_extensions import dataclass_transform

@@ -9,6 +10,7 @@ from vellum.workflows.descriptors.base import BaseDescriptor
 from vellum.workflows.errors.types import WorkflowErrorCode
 from vellum.workflows.exceptions import NodeException
 from vellum.workflows.references.output import OutputReference
+from vellum.workflows.types.generics import is_node_instance
 from vellum.workflows.types.utils import get_class_attr_names, infer_types

 if TYPE_CHECKING:
@@ -198,8 +200,30 @@ class BaseOutputs(metaclass=_BaseOutputsMeta):
         for name, value in kwargs.items():
             setattr(self, name, value)

-        if hasattr(self, "_outputs_post_init") and callable(self._outputs_post_init):
-            self._outputs_post_init(**kwargs)
+        # If climb up the to the caller's frame, and if it's a BaseNode instance, it should
+        # have a state attribute that we can use to resolve the output descriptors.
+        frame = inspect.currentframe()
+        if not frame:
+            return
+
+        caller_frame = frame.f_back
+        if not caller_frame or "self" not in caller_frame.f_locals:
+            return
+
+        caller_self = caller_frame.f_locals["self"]
+        if not is_node_instance(caller_self):
+            return
+
+        for node_output_descriptor in self.__class__:
+            if node_output_descriptor.name in kwargs:
+                continue
+
+            if isinstance(node_output_descriptor.instance, BaseDescriptor):
+                setattr(
+                    self,
+                    node_output_descriptor.name,
+                    node_output_descriptor.instance.resolve(caller_self.state),
+                )

     def __eq__(self, other: object) -> bool:
         if not isinstance(other, (dict, BaseOutputs)):
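
The replacement for `_outputs_post_init` above resolves output descriptors by walking up to the caller's stack frame and checking whether its `self` local is a node instance. A standalone sketch of that caller-frame pattern, using illustrative names (`make_message`, `Greeter`) that do not exist in the SDK:

    import inspect

    def make_message() -> str:
        # Climb to the caller's frame, mirroring BaseOutputs.__init__ above:
        # currentframe() is this call, f_back is whoever called us.
        frame = inspect.currentframe()
        caller = frame.f_back if frame else None
        caller_self = caller.f_locals.get("self") if caller else None
        owner = type(caller_self).__name__ if caller_self is not None else "<no self>"
        return f"called from {owner}"

    class Greeter:
        def hello(self) -> str:
            # `self` in this frame is what make_message() discovers.
            return make_message()

    assert Greeter().hello() == "called from Greeter"
    assert make_message() == "called from <no self>"
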
@@ -42,6 +42,11 @@ def is_node_class(obj: Any) -> TypeGuard[Type["BaseNode"]]:
     return isinstance(obj, type) and issubclass(obj, base_node_class)


+def is_node_instance(obj: Any) -> TypeGuard["BaseNode"]:
+    base_node_class = _import_node_class()
+    return isinstance(obj, base_node_class)
+
+
 def is_workflow_class(obj: Any) -> TypeGuard[Type["BaseWorkflow"]]:
     base_workflow_class = import_workflow_class()
     return isinstance(obj, type) and issubclass(obj, base_workflow_class)
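
The new `is_node_instance` follows the same `TypeGuard` pattern as the existing `is_node_class` helper: a plain `isinstance` check whose return type lets type checkers narrow the argument. A small self-contained sketch of that pattern with a hypothetical `Widget` type that is not part of the SDK:

    from typing import Any, List

    from typing_extensions import TypeGuard

    class Widget:
        def render(self) -> str:
            return "widget"

    def is_widget(obj: Any) -> TypeGuard[Widget]:
        # Same shape as is_node_instance: the TypeGuard return type tells
        # type checkers that a True result means obj is a Widget.
        return isinstance(obj, Widget)

    def render_all(items: List[Any]) -> List[str]:
        # Inside the guard, checkers treat `item` as Widget, so .render() type-checks.
        return [item.render() for item in items if is_widget(item)]

    print(render_all([Widget(), "not a widget"]))  # ['widget']
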
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: vellum-ai
-Version: 0.14.74
+Version: 0.14.75
 Summary:
 License: MIT
 Requires-Python: >=3.9,<4.0
@@ -144,7 +144,7 @@ vellum/client/README.md,sha256=CuGUYnaE0Imt0KqQ4sIPaUghCjLHkF3DdEvZWu14-8s,4807
 vellum/client/__init__.py,sha256=AYopGv2ZRVn3zsU8_km6KOvEHDbXiTPCVuYVI7bWvdA,120166
 vellum/client/core/__init__.py,sha256=SQ85PF84B9MuKnBwHNHWemSGuy-g_515gFYNFhvEE0I,1438
 vellum/client/core/api_error.py,sha256=RE8LELok2QCjABadECTvtDp7qejA1VmINCh6TbqPwSE,426
-vellum/client/core/client_wrapper.py,sha256=KPv6Bi7gbvtP_1ZhF5Do4HCJqNkUHW0nNYBEtS1ZDeY,1869
+vellum/client/core/client_wrapper.py,sha256=cx7FO_qUE2Qlz3HNnJqi2W7mb70YPGjdNeHhflWGylY,1869
 vellum/client/core/datetime_utils.py,sha256=nBys2IsYrhPdszxGKCNRPSOCwa-5DWOHG95FB8G9PKo,1047
 vellum/client/core/file.py,sha256=d4NNbX8XvXP32z8KpK2Xovv33nFfruIrpz0QWxlgpZk,2663
 vellum/client/core/http_client.py,sha256=Z77OIxIbL4OAB2IDqjRq_sYa5yNYAWfmdhdCSSvh6Y4,19552
@@ -160,7 +160,7 @@ vellum/client/errors/bad_request_error.py,sha256=_EbO8mWqN9kFZPvIap8qa1lL_EWkRcs
 vellum/client/errors/forbidden_error.py,sha256=QO1kKlhClAPES6zsEK7g9pglWnxn3KWaOCAawWOg6Aw,263
 vellum/client/errors/internal_server_error.py,sha256=8USCagXyJJ1MOm9snpcXIUt6eNXvrd_aq7Gfcu1vlOI,268
 vellum/client/errors/not_found_error.py,sha256=tBVCeBC8n3C811WHRj_n-hs3h8MqwR5gp0vLiobk7W8,262
-vellum/client/reference.md,sha256=I-z_aZGJKDQh443ywv92ezeI9w_XsiLh-vHULu8RsDg,91011
+vellum/client/reference.md,sha256=AG_zpL_pTZNHF9fEU-3I74QxAfCfCAsUrOXizL2PWig,91317
 vellum/client/resources/__init__.py,sha256=XgQao4rJxyYu71j64RFIsshz4op9GE8-i-C5GCv-KVE,1555
 vellum/client/resources/ad_hoc/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/client/resources/ad_hoc/client.py,sha256=rtpiGR6j8CcXSnN6UW_jYwLLdfJ9dwkTm_nta9oRzno,25933
@@ -188,7 +188,7 @@ vellum/client/resources/ml_models/client.py,sha256=XIYapTEY6GRNr7V0Kjy5bEeKmrhv9
 vellum/client/resources/organizations/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/client/resources/organizations/client.py,sha256=Uye92moqjAcOCs4astmuFpT92QdC5SLMunA-C8_G-gA,3675
 vellum/client/resources/prompts/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
-vellum/client/resources/prompts/client.py,sha256=Z9Q9zvoCI8onkEbGSr5xVpNzzstV7xU9MmTwBnNoX98,14222
+vellum/client/resources/prompts/client.py,sha256=Kr_AZdPQUSesk_JtjHl6c7vIWp7TG2PcKC74NrpW6rQ,15060
 vellum/client/resources/release_reviews/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
 vellum/client/resources/release_reviews/client.py,sha256=nb-EC7c7Y0Rklvg6CnlUKO1EWrnK26osnYJ9Z5Yw9fA,5094
 vellum/client/resources/sandboxes/__init__.py,sha256=FTtvy8EDg9nNNg9WCatVgKTRYV8-_v1roeGPAKoa_pw,65
@@ -1572,7 +1572,7 @@ vellum/workflows/inputs/tests/test_inputs.py,sha256=lioA8917mFLYq7Ml69UNkqUjcWbb
 vellum/workflows/logging.py,sha256=_a217XogktV4Ncz6xKFz7WfYmZAzkfVRVuC0rWob8ls,437
 vellum/workflows/nodes/__init__.py,sha256=aVdQVv7Y3Ro3JlqXGpxwaU2zrI06plDHD2aumH5WUIs,1157
 vellum/workflows/nodes/bases/__init__.py,sha256=cniHuz_RXdJ4TQgD8CBzoiKDiPxg62ErdVpCbWICX64,58
-vellum/workflows/nodes/bases/base.py,sha256=FHZ5_pzN9NJ5Vpj1uo2QP-BzxCtVCUvcDo-taoqmasw,21095
+vellum/workflows/nodes/bases/base.py,sha256=OLnt140jfqRHxg4xyzdiBd3TciqNPfuNNH2iXCpv7a4,20422
 vellum/workflows/nodes/bases/base_adornment_node.py,sha256=hrgzuTetM4ynPd9YGHoK8Vwwn4XITi3aZZ_OCnQrq4Y,3433
 vellum/workflows/nodes/bases/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/nodes/bases/tests/test_base_adornment_node.py,sha256=fXZI9KqpS4XMBrBnIEkK3foHaBVvyHwYcQWWDKay7ic,1148
@@ -1587,7 +1587,7 @@ vellum/workflows/nodes/core/inline_subworkflow_node/tests/test_node.py,sha256=kU
 vellum/workflows/nodes/core/map_node/__init__.py,sha256=MXpZYmGfhsMJHqqlpd64WiJRtbAtAMQz-_3fCU_cLV0,56
 vellum/workflows/nodes/core/map_node/node.py,sha256=rbF7fLAU0vUDEpgtWqeQTZFlhWOhJw38tgxWJ6exud8,9313
 vellum/workflows/nodes/core/map_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=rf7CCDtjHxoPKeEtm9a8v_MNvkvu5UThH4xRXYrdEl8,6904
+vellum/workflows/nodes/core/map_node/tests/test_node.py,sha256=v80IwAZl3w6WVhMXMV-4fGnwre2_S1Z6zL5HoYZrTz8,8787
 vellum/workflows/nodes/core/retry_node/__init__.py,sha256=lN2bIy5a3Uzhs_FYCrooADyYU6ZGShtvLKFWpelwPvo,60
 vellum/workflows/nodes/core/retry_node/node.py,sha256=EM4ya8Myr7ADllpjt9q-BAhB3hGrsF8MLZhp5eh4lyo,5590
 vellum/workflows/nodes/core/retry_node/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -1680,7 +1680,7 @@ vellum/workflows/nodes/tests/test_mocks.py,sha256=mfPvrs75PKcsNsbJLQAN6PDFoVqs9T
 vellum/workflows/nodes/tests/test_utils.py,sha256=OPVZo9yi8qt0rVqayKhfgh4Hk-dVdIzqfbS89fDhRiE,4913
 vellum/workflows/nodes/utils.py,sha256=K2gf05eM-EKkKHf2SPpvEly8cBL4RftWSMvIZJIMlso,9455
 vellum/workflows/outputs/__init__.py,sha256=AyZ4pRh_ACQIGvkf0byJO46EDnSix1ZCAXfvh-ms1QE,94
-vellum/workflows/outputs/base.py,sha256=1OGHqBJVk7i8cW8uewFWOhIjuMlRRpzCDrGE30ZwDjw,8763
+vellum/workflows/outputs/base.py,sha256=PUn0zhGzYCSZL34JXtXg9zALlXS_cqxZldLilPxDzb8,9614
 vellum/workflows/ports/__init__.py,sha256=bZuMt-R7z5bKwpu4uPW7LlJeePOQWmCcDSXe5frUY5g,101
 vellum/workflows/ports/node_ports.py,sha256=2Uo9gwNVCuH86J-GXcpc95QSDh5I-XVvhHJCMSWe-S8,2825
 vellum/workflows/ports/port.py,sha256=j_qiZlpx-a1cK5E7sxXwPcb_9NS-KUM-JoP8mgqg32k,4073
@@ -1717,7 +1717,7 @@ vellum/workflows/types/__init__.py,sha256=KxUTMBGzuRCfiMqzzsykOeVvrrkaZmTTo1a7SL
 vellum/workflows/types/code_execution_node_wrappers.py,sha256=3MNIoFZKzVzNS5qFLVuDwMV17QJw72zo7NRf52yMq5A,3074
 vellum/workflows/types/core.py,sha256=iLJkMKf417kjwRncWdT_qsfJ-qBv5x58um7SfrydJbs,1266
 vellum/workflows/types/definition.py,sha256=guuCHZkto8bkknoMcjfXRhSaDuiNjx3SNkutPf1makc,2506
-vellum/workflows/types/generics.py,sha256=tKXz0LwWJGKw1YGudyl9_yFDrRgU6yYV1yJV1Zv-LTw,1430
+vellum/workflows/types/generics.py,sha256=8jptbEx1fnJV0Lhj0MpCJOT6yNiEWeTOYOwrEAb5CRU,1576
 vellum/workflows/types/stack.py,sha256=h7NE0vXR7l9DevFBIzIAk1Zh59K-kECQtDTKOUunwMY,1314
 vellum/workflows/types/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/types/tests/test_utils.py,sha256=UnZog59tR577mVwqZRqqWn2fScoOU1H6up0EzS8zYhw,2536
@@ -1740,8 +1740,8 @@ vellum/workflows/workflows/event_filters.py,sha256=GSxIgwrX26a1Smfd-6yss2abGCnad
 vellum/workflows/workflows/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 vellum/workflows/workflows/tests/test_base_workflow.py,sha256=fROqff6AZpCIzaSwOKSdtYy4XR0UZQ6ejxL3RJOSJVs,20447
 vellum/workflows/workflows/tests/test_context.py,sha256=VJBUcyWVtMa_lE5KxdhgMu0WYNYnUQUDvTF7qm89hJ0,2333
-vellum_ai-0.14.74.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
-vellum_ai-0.14.74.dist-info/METADATA,sha256=iFE4Twp7A5973aqOr498hlOQZ6LSnVAszYURwtB9VkI,5556
-vellum_ai-0.14.74.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
-vellum_ai-0.14.74.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
-vellum_ai-0.14.74.dist-info/RECORD,,
+vellum_ai-0.14.75.dist-info/LICENSE,sha256=hOypcdt481qGNISA784bnAGWAE6tyIf9gc2E78mYC3E,1574
+vellum_ai-0.14.75.dist-info/METADATA,sha256=Wclzf2hGrdv0YvzbG76xaKSH1JyLVRISdFarQai4Lks,5556
+vellum_ai-0.14.75.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+vellum_ai-0.14.75.dist-info/entry_points.txt,sha256=HCH4yc_V3J_nDv3qJzZ_nYS8llCHZViCDP1ejgCc5Ak,42
+vellum_ai-0.14.75.dist-info/RECORD,,