veris-ai 0.2.1__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff shows the content of two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of veris-ai might be problematic.
- veris_ai/__init__.py +35 -1
- veris_ai/braintrust_tracing.py +282 -0
- veris_ai/jaeger_interface/README.md +109 -0
- veris_ai/jaeger_interface/__init__.py +26 -0
- veris_ai/jaeger_interface/client.py +133 -0
- veris_ai/jaeger_interface/models.py +153 -0
- veris_ai/tool_mock.py +167 -108
- veris_ai/utils.py +270 -0
- veris_ai-1.1.0.dist-info/METADATA +448 -0
- veris_ai-1.1.0.dist-info/RECORD +12 -0
- veris_ai-0.2.1.dist-info/METADATA +0 -137
- veris_ai-0.2.1.dist-info/RECORD +0 -6
- {veris_ai-0.2.1.dist-info → veris_ai-1.1.0.dist-info}/WHEEL +0 -0
- {veris_ai-0.2.1.dist-info → veris_ai-1.1.0.dist-info}/licenses/LICENSE +0 -0
veris_ai/utils.py
ADDED
@@ -0,0 +1,270 @@
+import sys
+import types
+import typing
+from contextlib import suppress
+from typing import Any, ForwardRef, Literal, NotRequired, Required, Union, get_args, get_origin
+
+from pydantic import BaseModel
+
+
+def convert_to_type(value: object, target_type: type) -> object:
+    """Convert a value to the specified type."""
+    # Special case: Any type returns value as-is
+    if target_type is Any:
+        return value
+
+    origin = get_origin(target_type)
+
+    # Define conversion strategies for different type origins
+    converters = {
+        list: _convert_list,
+        dict: _convert_dict,
+        Union: _convert_union,
+    }
+
+    # Use appropriate converter based on origin
+    if origin in converters:
+        return converters[origin](value, target_type)
+
+    # Handle primitives and custom types
+    return _convert_simple_type(value, target_type)
+
+
+def _convert_list(value: object, target_type: type) -> list:
+    """Convert a value to a typed list."""
+    if not isinstance(value, list):
+        error_msg = f"Expected list but got {type(value)}"
+        raise ValueError(error_msg)
+
+    item_type = get_args(target_type)[0]
+    return [convert_to_type(item, item_type) for item in value]
+
+
+def _convert_dict(value: object, target_type: type) -> dict:
+    """Convert a value to a typed dict."""
+    if not isinstance(value, dict):
+        error_msg = f"Expected dict but got {type(value)}"
+        raise ValueError(error_msg)
+
+    key_type, value_type = get_args(target_type)
+    return {convert_to_type(k, key_type): convert_to_type(v, value_type) for k, v in value.items()}
+
+
+def _convert_union(value: object, target_type: type) -> object:
+    """Try to convert value to one of the union types."""
+    union_types = get_args(target_type)
+
+    for possible_type in union_types:
+        with suppress(ValueError, TypeError):
+            return convert_to_type(value, possible_type)
+
+    error_msg = f"Could not convert {value} to any of the union types {union_types}"
+    raise ValueError(error_msg)
+
+
+def _convert_simple_type(value: object, target_type: type) -> object:
+    """Convert to primitive or custom types."""
+    # Primitive types
+    if target_type in (str, int, float, bool):
+        return target_type(value)
+
+    # Custom types - try kwargs for dicts, then direct instantiation
+    if isinstance(value, dict):
+        with suppress(TypeError):
+            return target_type(**value)
+
+    return target_type(value)
+
+
+def _resolve_forward_ref(ref: ForwardRef, module_context: types.ModuleType | None = None) -> Any:  # noqa: ANN401
+    """Resolve a ForwardRef to its actual type."""
+    if not isinstance(ref, ForwardRef):
+        return ref
+
+    # Try to evaluate the forward reference
+    try:
+        # Get the module's namespace for evaluation
+        namespace = dict(vars(module_context)) if module_context else {}
+
+        # Add common typing imports to namespace
+        namespace.update(
+            {
+                "Union": Union,
+                "Any": Any,
+                "Literal": Literal,
+                "Required": Required,
+                "NotRequired": NotRequired,
+                "List": list,
+                "Dict": dict,
+                "Optional": typing.Optional,
+                "Iterable": typing.Iterable,
+                "str": str,
+                "int": int,
+                "float": float,
+                "bool": bool,
+            },
+        )
+
+        # Try to import from the same module to resolve local references
+        if module_context and hasattr(module_context, "__name__"):
+            with suppress(Exception):
+                # Import all from the module to get access to local types
+                exec(f"from {module_context.__name__} import *", namespace)  # noqa: S102
+
+        # Get the forward reference string
+        ref_string = ref.__forward_arg__ if hasattr(ref, "__forward_arg__") else str(ref)
+
+        # Try to evaluate the forward reference string
+        return eval(ref_string, namespace, namespace)  # noqa: S307
+    except Exception:
+        # If we can't resolve it, return the ref itself
+        return ref
+
+
+def _unwrap_required(field_type: Any) -> tuple[Any, bool]:  # noqa: ANN401
+    """Unwrap Required/NotRequired and return the inner type and whether it's required."""
+    origin = get_origin(field_type)
+
+    # Check if it's Required or NotRequired
+    if origin is Required:
+        args = get_args(field_type)
+        return args[0] if args else field_type, True
+    if origin is NotRequired:
+        args = get_args(field_type)
+        return args[0] if args else field_type, False
+
+    # Default to required for TypedDict fields
+    return field_type, True
+
+
+def extract_json_schema(target_type: Any) -> dict:  # noqa: PLR0911, PLR0912, C901, ANN401
+    """Extract the JSON schema from a type or pydantic model.
+
+    Args:
+        target_type: The type or pydantic model to extract the JSON schema from.
+
+    Returns:
+        A dictionary representing the JSON schema.
+
+    Example:
+        >>> extract_json_schema(int)
+        {"type": "integer"}
+
+        >>> extract_json_schema(list[int])
+        {"type": "array", "items": {"type": "integer"}}
+
+        >>> extract_json_schema(list[User])
+        {"type": "array", "items": {"type": "object", "properties": {...}}}
+    """
+    # Handle Pydantic BaseModel instances or classes
+    if isinstance(target_type, type) and issubclass(target_type, BaseModel):
+        return target_type.model_json_schema()
+    if isinstance(target_type, BaseModel):
+        return target_type.model_json_schema()
+
+    # Handle TypedDict
+    if (
+        isinstance(target_type, type)
+        and hasattr(target_type, "__annotations__")
+        and hasattr(target_type, "__total__")
+    ):
+        # This is a TypedDict
+        properties = {}
+        required = []
+
+        # Get the module context for resolving forward references
+        module = sys.modules.get(target_type.__module__)
+
+        for field_name, field_type_annotation in target_type.__annotations__.items():
+            # Resolve forward references if present
+            resolved_type = field_type_annotation
+            if isinstance(resolved_type, ForwardRef):
+                resolved_type = _resolve_forward_ref(resolved_type, module)
+
+            # Unwrap Required/NotRequired
+            unwrapped_type, is_required = _unwrap_required(resolved_type)
+
+            # Extract schema for the unwrapped type
+            properties[field_name] = extract_json_schema(unwrapped_type)
+
+            # Add to required list if necessary
+            if is_required and getattr(target_type, "__total__", True):
+                required.append(field_name)
+
+        schema = {"type": "object", "properties": properties}
+        if required:
+            schema["required"] = required
+        return schema
+
+    # Handle built-in types
+    type_mapping = {
+        str: {"type": "string"},
+        int: {"type": "integer"},
+        float: {"type": "number"},
+        bool: {"type": "boolean"},
+        type(None): {"type": "null"},
+        Any: {},  # Empty schema for Any type
+    }
+
+    if target_type in type_mapping:
+        return type_mapping[target_type]
+
+    # Handle generic types
+    origin = get_origin(target_type)
+
+    # Handle bare collection types
+    if target_type is list:
+        return {"type": "array"}
+    if target_type is dict:
+        return {"type": "object"}
+    if target_type is tuple:
+        return {"type": "array"}
+
+    # Handle Literal types
+    if origin is Literal:
+        values = get_args(target_type)
+        if len(values) == 1:
+            # Single literal value - use const
+            return {"const": values[0]}
+        # Multiple literal values - use enum
+        return {"enum": list(values)}
+
+    if origin is list:
+        args = get_args(target_type)
+        if args:
+            return {"type": "array", "items": extract_json_schema(args[0])}
+        return {"type": "array"}
+
+    if origin is dict:
+        args = get_args(target_type)
+        if len(args) == 2:  # noqa: PLR2004
+            # For typed dicts like dict[str, int]
+            return {
+                "type": "object",
+                "additionalProperties": extract_json_schema(args[1]),
+            }
+        return {"type": "object"}
+
+    if origin is Union:
+        args = get_args(target_type)
+        # Handle Optional types (Union[T, None])
+        if len(args) == 2 and type(None) in args:  # noqa: PLR2004
+            non_none_type = args[0] if args[1] is type(None) else args[1]
+            schema = extract_json_schema(non_none_type)
+            return {"anyOf": [schema, {"type": "null"}]}
+        # Handle general Union types
+        return {"anyOf": [extract_json_schema(arg) for arg in args]}
+
+    if origin is tuple:
+        args = get_args(target_type)
+        if args:
+            return {
+                "type": "array",
+                "prefixItems": [extract_json_schema(arg) for arg in args],
+                "minItems": len(args),
+                "maxItems": len(args),
+            }
+        return {"type": "array"}
+
+    # Default case for unknown types
+    return {"type": "object"}