ragaai-catalyst 2.1.1b5__py3-none-any.whl → 2.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/dataset.py +1 -5
- ragaai_catalyst/experiment.py +1 -5
- ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py +11 -7
- ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +1 -10
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +95 -84
- ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +42 -17
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +4 -2
- ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +1 -11
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +4 -3
- ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +53 -5
- ragaai_catalyst/tracers/exporters/raga_exporter.py +1 -5
- ragaai_catalyst/tracers/tracer.py +18 -16
- {ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/METADATA +2 -2
- {ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/RECORD +16 -16
- {ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/top_level.txt +0 -0
ragaai_catalyst/dataset.py
CHANGED
@@ -16,11 +16,7 @@ class Dataset:
     def __init__(self, project_name):
         self.project_name = project_name
         self.num_projects = 99999
-        Dataset.BASE_URL = (
-            os.getenv("RAGAAI_CATALYST_BASE_URL")
-            if os.getenv("RAGAAI_CATALYST_BASE_URL")
-            else "https://catalyst.raga.ai/api"
-        )
+        Dataset.BASE_URL = RagaAICatalyst.BASE_URL
         headers = {
             "Authorization": f'Bearer {os.getenv("RAGAAI_CATALYST_TOKEN")}',
         }
ragaai_catalyst/experiment.py
CHANGED
@@ -31,11 +31,7 @@ class Experiment:
         Returns:
             None
         """
-        Experiment.BASE_URL = (
-            os.getenv("RAGAAI_CATALYST_BASE_URL")
-            if os.getenv("RAGAAI_CATALYST_BASE_URL")
-            else "https://llm-platform.prod5.ragaai.ai/api"
-        )
+        Experiment.BASE_URL = RagaAICatalyst.BASE_URL
        self.project_name = project_name
        self.experiment_name = experiment_name
        self.experiment_description = experiment_description
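In 2.1.2, Dataset, Experiment, and RagaExporter all take their backend URL from RagaAICatalyst.BASE_URL instead of each re-reading RAGAAI_CATALYST_BASE_URL. A minimal sketch of that centralization pattern, assuming the same environment-variable override and default endpoint as the removed code (class bodies trimmed for illustration):

    import os

    class RagaAICatalyst:
        # Resolved once, in one place; every other class points here.
        BASE_URL = os.getenv("RAGAAI_CATALYST_BASE_URL") or "https://catalyst.raga.ai/api"

    class Dataset:
        BASE_URL = None

        def __init__(self, project_name):
            self.project_name = project_name
            Dataset.BASE_URL = RagaAICatalyst.BASE_URL  # mirrors the 2.1.2 change

    print(Dataset("demo").BASE_URL)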
ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py
CHANGED
@@ -173,15 +173,17 @@ class LLMCall:
     duration: float = field(default=0)

 class Component:
-    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
+    def __init__(self, id: str, hash_id: str, source_hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], extra_info: Optional[Dict[str, Any]] = None, data: Dict[str, Any]={}, network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
         self.id = id
         self.hash_id = hash_id
+        self.source_hash_id = source_hash_id
         self.type = type
         self.name = name
         self.start_time = start_time
         self.end_time = end_time
         self.parent_id = parent_id
         self.info = info
+        self.extra_info = extra_info
         self.data = data
         self.error = error
         self.network_calls = network_calls or []
@@ -205,12 +207,14 @@ class Component:
         return {
             "id": self.id,
             "hash_id": self.hash_id,
+            "source_hash_id": self.source_hash_id,
             "type": self.type,
             "name": self.name,
             "start_time": self.start_time,
             "end_time": self.end_time,
             "parent_id": self.parent_id,
             "info": self.info,
+            "extra_info": self.extra_info,
             "error": self.error,
             "data": self.data,
             "error": self.error,
@@ -219,16 +223,16 @@ class Component:
         }

 class LLMComponent(Component):
-    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
-        super().__init__(id, hash_id, type, name, start_time, end_time, parent_id, info, data, network_calls, interactions, error)
+    def __init__(self, id: str, hash_id: str, source_hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], extra_info: Optional[Dict[str, Any]] = None, network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
+        super().__init__(id, hash_id, source_hash_id, type, name, start_time, end_time, parent_id, info, extra_info, data, network_calls, interactions, error)

 class AgentComponent(Component):
-    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
-        super().__init__(id, hash_id, type, name, start_time, end_time, parent_id, info, data, network_calls, interactions, error)
+    def __init__(self, id: str, hash_id: str, source_hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], extra_info: Optional[Dict[str, Any]] = None, network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
+        super().__init__(id, hash_id, source_hash_id, type, name, start_time, end_time, parent_id, info, extra_info, data, network_calls, interactions, error)

 class ToolComponent(Component):
-    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
-        super().__init__(id, hash_id, type, name, start_time, end_time, parent_id, info, data, network_calls, interactions, error)
+    def __init__(self, id: str, hash_id: str, source_hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], extra_info: Optional[Dict[str, Any]] = None, network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Dict[str, Any]] = None):
+        super().__init__(id, hash_id, source_hash_id, type, name, start_time, end_time, parent_id, info, extra_info, data, network_calls, interactions, error)

 @dataclass
 class ComponentInfo:
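The Component hierarchy gains two fields in 2.1.2, source_hash_id and extra_info, and both are carried through to the serialized span dict. A hedged sketch of constructing one of the new-style components; the argument values below are illustrative placeholders, not real trace data:

    from datetime import datetime
    from ragaai_catalyst.tracers.agentic_tracing.data.data_structure import LLMComponent

    now = datetime.now().astimezone().isoformat()
    component = LLMComponent(
        id="comp-1",
        hash_id="abc123",
        source_hash_id="def456",          # new in 2.1.2
        type="llm",
        name="summarize_call",
        start_time=now,
        end_time=now,
        parent_id=1,
        info={"model": "gpt-4o-mini"},
        data={"input": "...", "output": "..."},
        extra_info={"temperature": 0.2},  # new in 2.1.2; llm_tracer stores the raw call parameters here
    )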
ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py
CHANGED
@@ -61,7 +61,6 @@ class AgentTracerMixin:
            version=version,
            capabilities=capabilities or [],
            start_time=datetime.now(),
-            end_time=datetime.now(),
            memory_used=0,
            input_data=tracer._sanitize_input(args, kwargs),
            output_data=None,
@@ -110,7 +109,6 @@ class AgentTracerMixin:
        try:
            start_time = datetime.now()
            result = method(self, *args, **kwargs)
-            end_time = datetime.now()

            # Update agent component with method result
            if hasattr(tracer, '_agent_components'):
@@ -119,7 +117,6 @@ class AgentTracerMixin:
                component['data']['output'] = tracer._sanitize_output(result)
                component['data']['input'] = tracer._sanitize_input(args, kwargs)
                component['start_time'] = start_time.isoformat()
-                component['end_time'] = end_time.isoformat()

                # Get children accumulated during method execution
                children = tracer.agent_children.get()
@@ -192,7 +189,6 @@ class AgentTracerMixin:
            result = func(*args, **kwargs)

            # Calculate resource usage
-            end_time = datetime.now()
            end_memory = psutil.Process().memory_info().rss
            memory_used = max(0, end_memory - start_memory)

@@ -211,7 +207,6 @@ class AgentTracerMixin:
                version=version,
                capabilities=capabilities or [],
                start_time=start_time,
-                end_time=end_time,
                memory_used=memory_used,
                input_data=self.input_data,
                output_data=self._sanitize_output(result),
@@ -257,7 +252,6 @@ class AgentTracerMixin:
                version=version,
                capabilities=capabilities or [],
                start_time=start_time,
-                end_time=datetime.now(),
                memory_used=0,
                input_data=self.input_data,
                output_data=None,
@@ -309,7 +303,6 @@ class AgentTracerMixin:
            result = await func(*args, **kwargs)

            # Calculate resource usage
-            end_time = datetime.now()
            end_memory = psutil.Process().memory_info().rss
            memory_used = max(0, end_memory - start_memory)

@@ -325,7 +318,6 @@ class AgentTracerMixin:
                version=version,
                capabilities=capabilities or [],
                start_time=start_time,
-                end_time=end_time,
                memory_used=memory_used,
                input_data=self._sanitize_input(args, kwargs),
                output_data=self._sanitize_output(result),
@@ -372,7 +364,6 @@ class AgentTracerMixin:
                version=version,
                capabilities=capabilities or [],
                start_time=start_time,
-                end_time=datetime.now(),
                memory_used=0,
                input_data=self._sanitize_input(args, kwargs),
                output_data=None,
@@ -408,7 +399,7 @@ class AgentTracerMixin:
            "type": "agent",
            "name": kwargs["name"],
            "start_time": start_time.isoformat(),
-            "end_time":
+            "end_time": datetime.now().astimezone().isoformat(),
            "error": kwargs.get("error"),
            "parent_id": kwargs.get("parent_id"),
            "info": {
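Across the agent tracer, the separate end_time bookkeeping is dropped; components are now stamped when they are created, using a timezone-aware ISO-8601 string. For reference, the expression the new code uses:

    from datetime import datetime

    # Timezone-aware ISO-8601 stamp, e.g. '2025-01-07T12:34:56.789012+05:30'
    end_time = datetime.now().astimezone().isoformat()
    print(end_time)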
ragaai_catalyst/tracers/agentic_tracing/tracers/base.py
CHANGED
@@ -9,7 +9,7 @@ from typing import List
 import uuid
 import sys
 import tempfile
-
+from ....ragaai_catalyst import RagaAICatalyst
 from ..data.data_structure import (
     Trace, Metadata, SystemInfo, OSInfo, EnvironmentInfo,
     Resources, CPUResource, MemoryResource, DiskResource, NetworkResource,
@@ -47,7 +47,7 @@ class BaseTracer:
     def __init__(self, user_details):
         self.user_details = user_details
         self.project_name = self.user_details['project_name'] # Access the project_name
-        self.
+        self._dataset_name = self.user_details['dataset_name'] # Access the dataset_name
         self.project_id = self.user_details['project_id'] # Access the project_id

         # Initialize trace data
@@ -55,87 +55,18 @@ class BaseTracer:
         self.start_time = None
         self.components: List[Component] = []
         self.file_tracker = TrackName()
-
-    def _get_system_info(self) -> SystemInfo:
-        # Get OS info
-        os_info = OSInfo(
-            name=platform.system(),
-            version=platform.version(),
-            platform=platform.machine(),
-            kernel_version=platform.release()
-        )
-
-        # Get Python environment info
-        installed_packages = [f"{pkg.key}=={pkg.version}" for pkg in pkg_resources.working_set]
-        env_info = EnvironmentInfo(
-            name="Python",
-            version=platform.python_version(),
-            packages=installed_packages,
-            env_path=sys.prefix,
-            command_to_run=f"python {sys.argv[0]}"
-        )
-
-        return SystemInfo(
-            id=f"sys_{self.trace_id}",
-            os=os_info,
-            environment=env_info,
-            source_code="Path to the source code .zip file in format hashid.zip" # TODO: Implement source code archiving
-        )
-
-    def _get_resources(self) -> Resources:
-        # CPU info
-        cpu_info = ResourceInfo(
-            name=platform.processor(),
-            cores=psutil.cpu_count(logical=False),
-            threads=psutil.cpu_count(logical=True)
-        )
-        cpu = CPUResource(
-            info=cpu_info,
-            interval="5s",
-            values=[psutil.cpu_percent()]
-        )
-
-        # Memory info
-        memory = psutil.virtual_memory()
-        mem_info = MemoryInfo(
-            total=memory.total / (1024**3), # Convert to GB
-            free=memory.available / (1024**3)
-        )
-        mem = MemoryResource(
-            info=mem_info,
-            interval="5s",
-            values=[memory.percent]
-        )
-
-        # Disk info
-        disk = psutil.disk_usage('/')
-        disk_info = DiskInfo(
-            total=disk.total / (1024**3),
-            free=disk.free / (1024**3)
-        )
-        disk_io = psutil.disk_io_counters()
-        disk_resource = DiskResource(
-            info=disk_info,
-            interval="5s",
-            read=[disk_io.read_bytes / (1024**2)], # MB
-            write=[disk_io.write_bytes / (1024**2)]
-        )
-
-        # Network info
-        net_io = psutil.net_io_counters()
-        net_info = NetworkInfo(
-            upload_speed=net_io.bytes_sent / (1024**2), # MB
-            download_speed=net_io.bytes_recv / (1024**2)
-        )
-        net = NetworkResource(
-            info=net_info,
-            interval="5s",
-            uploads=[net_io.bytes_sent / (1024**2)],
-            downloads=[net_io.bytes_recv / (1024**2)]
-        )
-
-        return Resources(cpu=cpu, memory=mem, disk=disk_resource, network=net)
-
+
+    @property
+    def dataset_name(self):
+        """Get the dataset name."""
+        return self._dataset_name
+
+    @dataset_name.setter
+    def dataset_name(self, value):
+        """Set the dataset name and update user_details."""
+        self._dataset_name = value
+        self.user_details['dataset_name'] = value
+
     def start(self):
         """Initialize a new trace"""
         metadata = Metadata(
@@ -203,7 +134,7 @@ class BaseTracer:
         project_id = self.project_id
         dataset_name = self.dataset_name
         user_detail = self.user_details
-        base_url =
+        base_url = RagaAICatalyst.BASE_URL
         upload_traces = UploadAgenticTraces(
             json_file_path=json_file_path,
             project_name=project_name,
@@ -238,6 +169,86 @@ class BaseTracer:
     def __exit__(self, exc_type, exc_value, traceback):
         self.stop()

+    def _get_system_info(self) -> SystemInfo:
+        # Get OS info
+        os_info = OSInfo(
+            name=platform.system(),
+            version=platform.version(),
+            platform=platform.machine(),
+            kernel_version=platform.release()
+        )
+
+        # Get Python environment info
+        installed_packages = [f"{pkg.key}=={pkg.version}" for pkg in pkg_resources.working_set]
+        env_info = EnvironmentInfo(
+            name="Python",
+            version=platform.python_version(),
+            packages=installed_packages,
+            env_path=sys.prefix,
+            command_to_run=f"python {sys.argv[0]}"
+        )
+
+        return SystemInfo(
+            id=f"sys_{self.trace_id}",
+            os=os_info,
+            environment=env_info,
+            source_code="Path to the source code .zip file in format hashid.zip" # TODO: Implement source code archiving
+        )
+
+    def _get_resources(self) -> Resources:
+        # CPU info
+        cpu_info = ResourceInfo(
+            name=platform.processor(),
+            cores=psutil.cpu_count(logical=False),
+            threads=psutil.cpu_count(logical=True)
+        )
+        cpu = CPUResource(
+            info=cpu_info,
+            interval="5s",
+            values=[psutil.cpu_percent()]
+        )
+
+        # Memory info
+        memory = psutil.virtual_memory()
+        mem_info = MemoryInfo(
+            total=memory.total / (1024**3), # Convert to GB
+            free=memory.available / (1024**3)
+        )
+        mem = MemoryResource(
+            info=mem_info,
+            interval="5s",
+            values=[memory.percent]
+        )
+
+        # Disk info
+        disk = psutil.disk_usage('/')
+        disk_info = DiskInfo(
+            total=disk.total / (1024**3),
+            free=disk.free / (1024**3)
+        )
+        disk_io = psutil.disk_io_counters()
+        disk_resource = DiskResource(
+            info=disk_info,
+            interval="5s",
+            read=[disk_io.read_bytes / (1024**2)], # MB
+            write=[disk_io.write_bytes / (1024**2)]
+        )
+
+        # Network info
+        net_io = psutil.net_io_counters()
+        net_info = NetworkInfo(
+            upload_speed=net_io.bytes_sent / (1024**2), # MB
+            download_speed=net_io.bytes_recv / (1024**2)
+        )
+        net = NetworkResource(
+            info=net_info,
+            interval="5s",
+            uploads=[net_io.bytes_sent / (1024**2)],
+            downloads=[net_io.bytes_recv / (1024**2)]
+        )
+
+        return Resources(cpu=cpu, memory=mem, disk=disk_resource, network=net)
+
     def _change_span_ids_to_int(self, trace):
         id, parent_id = 1, 0
         for span in trace.data[0]["spans"]:
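BaseTracer replaces the plain dataset_name attribute with a property whose setter also updates user_details, so renaming the dataset on a live tracer keeps the upload payload consistent. A self-contained sketch of the pattern (the class name here is illustrative):

    class TracerSketch:
        def __init__(self, user_details):
            self.user_details = user_details
            self._dataset_name = user_details['dataset_name']

        @property
        def dataset_name(self):
            """Get the dataset name."""
            return self._dataset_name

        @dataset_name.setter
        def dataset_name(self, value):
            """Set the dataset name and keep user_details in sync."""
            self._dataset_name = value
            self.user_details['dataset_name'] = value

    t = TracerSketch({'dataset_name': 'dev_runs'})
    t.dataset_name = 'prod_runs'
    assert t.user_details['dataset_name'] == 'prod_runs'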
ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py
CHANGED
@@ -41,6 +41,7 @@ class LLMTracerMixin:
                "output_cost_per_token": 0.0
            }
        }
+        self.MAX_PARAMETERS_TO_DISPLAY = 10
        self.current_llm_call_name = contextvars.ContextVar("llm_call_name", default=None)
        self.component_network_calls = {}
        self.component_user_interaction = {}
@@ -184,9 +185,25 @@ class LLMTracerMixin:
        @functools.wraps(original_init)
        def patched_init(client_self, *args, **kwargs):
            original_init(client_self, *args, **kwargs)
-
-
-
+            # Check if this is AsyncOpenAI or OpenAI
+            is_async = 'AsyncOpenAI' in client_class.__name__
+
+            if is_async:
+                # Patch async methods for AsyncOpenAI
+                if hasattr(client_self.chat.completions, "create"):
+                    original_create = client_self.chat.completions.create
+                    @functools.wraps(original_create)
+                    async def wrapped_create(*args, **kwargs):
+                        return await self.trace_llm_call(original_create, *args, **kwargs)
+                    client_self.chat.completions.create = wrapped_create
+            else:
+                # Patch sync methods for OpenAI
+                if hasattr(client_self.chat.completions, "create"):
+                    original_create = client_self.chat.completions.create
+                    @functools.wraps(original_create)
+                    def wrapped_create(*args, **kwargs):
+                        return self.trace_llm_call_sync(original_create, *args, **kwargs)
+                    client_self.chat.completions.create = wrapped_create

        setattr(client_class, "__init__", patched_init)

@@ -249,11 +266,27 @@ class LLMTracerMixin:
        setattr(obj, method_name, wrapped_method)
        self.patches.append((obj, method_name, original_method))

-    def create_llm_component(self, component_id, hash_id, name, llm_type, version, memory_used, start_time,
+    def create_llm_component(self, component_id, hash_id, name, llm_type, version, memory_used, start_time, input_data, output_data, cost={}, usage={}, error=None, parameters={}):
        # Update total metrics
        self.total_tokens += usage.get("total_tokens", 0)
        self.total_cost += cost.get("total_cost", 0)

+        parameters_to_display = {}
+        if 'run_manager' in parameters:
+            parameters_obj = parameters['run_manager']
+            if hasattr(parameters_obj, 'metadata'):
+                metadata = parameters_obj.metadata
+                # parameters = {'metadata': metadata}
+                parameters_to_display.update(metadata)
+
+        # Add only those keys in parameters that are single values and not objects, dict or list
+        for key, value in parameters.items():
+            if isinstance(value, (str, int, float, bool)):
+                parameters_to_display[key] = value
+
+        # Limit the number of parameters to display
+        parameters_to_display = dict(list(parameters_to_display.items())[:self.MAX_PARAMETERS_TO_DISPLAY])
+
        component = {
            "id": component_id,
            "hash_id": hash_id,
@@ -261,7 +294,7 @@ class LLMTracerMixin:
            "type": "llm",
            "name": name,
            "start_time": start_time.isoformat(),
-            "end_time":
+            "end_time": datetime.now().astimezone().isoformat(),
            "error": error,
            "parent_id": self.current_agent_id.get(),
            "info": {
@@ -270,8 +303,9 @@ class LLMTracerMixin:
                "memory_used": memory_used,
                "cost": cost,
                "tokens": usage,
-                **
+                **parameters_to_display
            },
+            "extra_info": parameters,
            "data": {
                "input": input_data['args'] if hasattr(input_data, 'args') else input_data,
                "output": output_data.output_response if output_data else None,
@@ -314,7 +348,6 @@ class LLMTracerMixin:
            result = await original_func(*args, **kwargs)

            # Calculate resource usage
-            end_time = datetime.now().astimezone()
            end_memory = psutil.Process().memory_info().rss
            memory_used = max(0, end_memory - start_memory)

@@ -341,7 +374,6 @@ class LLMTracerMixin:
                version="1.0.0",
                memory_used=memory_used,
                start_time=start_time,
-                end_time=end_time,
                input_data=input_data,
                output_data=extract_llm_output(result),
                cost=cost,
@@ -363,8 +395,6 @@ class LLMTracerMixin:

            # End tracking network calls for this component
            self.end_component(component_id)
-
-            end_time = datetime.now().astimezone()

            name = self.current_llm_call_name.get()
            if name is None:
@@ -378,7 +408,6 @@ class LLMTracerMixin:
                version="1.0.0",
                memory_used=0,
                start_time=start_time,
-                end_time=end_time,
                input_data=extract_input_data(args, kwargs, None),
                output_data=None,
                error=error_component
@@ -402,7 +431,6 @@ class LLMTracerMixin:
        self.start_component(component_id)

        # Calculate resource usage
-        end_time = datetime.now().astimezone()
        start_memory = psutil.Process().memory_info().rss

        try:
@@ -438,7 +466,6 @@ class LLMTracerMixin:
                version="1.0.0",
                memory_used=memory_used,
                start_time=start_time,
-                end_time=end_time,
                input_data=input_data,
                output_data=extract_llm_output(result),
                cost=cost,
@@ -459,8 +486,6 @@ class LLMTracerMixin:

            # End tracking network calls for this component
            self.end_component(component_id)
-
-            end_time = datetime.now().astimezone()

            name = self.current_llm_call_name.get()
            if name is None:
@@ -477,7 +502,6 @@ class LLMTracerMixin:
                version="1.0.0",
                memory_used=memory_used,
                start_time=start_time,
-                end_time=end_time,
                input_data=extract_input_data(args, kwargs, None),
                output_data=None,
                error=error_component
@@ -487,12 +511,12 @@ class LLMTracerMixin:
            raise

    def trace_llm(self, name: str = None):
-        self.current_llm_call_name.set(name)
        def decorator(func):
            @self.file_tracker.trace_decorator
            @functools.wraps(func)
            async def async_wrapper(*args, **kwargs):
                self.gt = kwargs.get('gt', None) if kwargs else None
+                self.current_llm_call_name.set(name)
                if not self.is_active:
                    return await func(*args, **kwargs)

@@ -542,6 +566,7 @@ class LLMTracerMixin:
            @functools.wraps(func)
            def sync_wrapper(*args, **kwargs):
                self.gt = kwargs.get('gt', None) if kwargs else None
+                self.current_llm_call_name.set(name)
                if not self.is_active:
                    return func(*args, **kwargs)

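The patched OpenAI client __init__ now checks whether the class is AsyncOpenAI or OpenAI and wraps chat.completions.create with an async or sync tracer accordingly. A stripped-down sketch of the wrap-on-init idea, using a fake client so the snippet runs without the OpenAI SDK:

    import functools

    class FakeCompletions:
        def create(self, **kwargs):
            return {"echo": kwargs.get("model")}

    def patch_create(completions, trace_sync):
        # Replace .create with a wrapper that routes every call through the tracer callback.
        original_create = completions.create

        @functools.wraps(original_create)
        def wrapped_create(*args, **kwargs):
            return trace_sync(original_create, *args, **kwargs)

        completions.create = wrapped_create

    def trace_llm_call_sync(original, *args, **kwargs):
        print("tracing model:", kwargs.get("model"))
        return original(*args, **kwargs)

    completions = FakeCompletions()
    patch_create(completions, trace_llm_call_sync)
    print(completions.create(model="gpt-4o-mini"))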
ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py
CHANGED
@@ -166,7 +166,7 @@ class AgenticTracing(BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin):
    def add_component(self, component_data: dict, is_error: bool = False):
        """Add a component to the trace data"""
        # Convert dict to appropriate Component type
-        filtered_data = {k: v for k, v in component_data.items() if k in ["id", "hash_id", "type", "name", "start_time", "end_time", "parent_id", "info", "data", "network_calls", "interactions", "error"]}
+        filtered_data = {k: v for k, v in component_data.items() if k in ["id", "hash_id", "source_hash_id", "type", "name", "start_time", "end_time", "parent_id", "info", "extra_info", "data", "network_calls", "interactions", "error"]}

        if component_data["type"] == "llm":
            component = LLMComponent(**filtered_data)
@@ -206,6 +206,8 @@ class AgenticTracing(BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin):
            parent_component = agent_tracer_mixin.create_agent_component(
                component_id=parent_id,
                hash_id=str(uuid.uuid4()),
+                source_hash_id=None,
+                type="agent",
                name=self.current_agent_name.get(),
                agent_type=self.agent_type.get(),
                version=self.version.get(),
@@ -219,7 +221,7 @@ class AgenticTracing(BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin):
                parent_id=None # Add parent ID if exists
            )

-            filtered_data = {k: v for k, v in parent_component.items() if k in ["id", "hash_id", "type", "name", "start_time", "end_time", "parent_id", "info", "data", "network_calls", "interactions", "error"]}
+            filtered_data = {k: v for k, v in parent_component.items() if k in ["id", "hash_id", "source_hash_id", "type", "name", "start_time", "end_time", "parent_id", "info", "data", "network_calls", "interactions", "error"]}
            parent_agent_component = AgentComponent(**filtered_data)
            # Add the parent component to trace and stop tracing
            super().add_component(parent_agent_component)
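add_component whitelists the incoming dict against the keys the Component constructors accept, which now include source_hash_id and extra_info, before dispatching on type. The filtering step in isolation (a sketch; the allowed-key list is copied from the diff above):

    ALLOWED_KEYS = [
        "id", "hash_id", "source_hash_id", "type", "name", "start_time", "end_time",
        "parent_id", "info", "extra_info", "data", "network_calls", "interactions", "error",
    ]

    def filter_component_data(component_data: dict) -> dict:
        # Drop anything the Component constructors do not accept.
        return {k: v for k, v in component_data.items() if k in ALLOWED_KEYS}

    raw = {"id": "c1", "hash_id": "h1", "type": "llm", "unexpected": 123}
    print(filter_component_data(raw))  # {'id': 'c1', 'hash_id': 'h1', 'type': 'llm'}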
ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py
CHANGED
@@ -75,7 +75,6 @@ class ToolTracerMixin:
            result = func(*args, **kwargs)

            # Calculate resource usage
-            end_time = datetime.now().astimezone()
            end_memory = psutil.Process().memory_info().rss
            memory_used = max(0, end_memory - start_memory)

@@ -91,7 +90,6 @@ class ToolTracerMixin:
                version=version,
                memory_used=memory_used,
                start_time=start_time,
-                end_time=end_time,
                input_data=self._sanitize_input(args, kwargs),
                output_data=self._sanitize_output(result)
            )
@@ -110,8 +108,6 @@ class ToolTracerMixin:
            # End tracking network calls for this component
            self.end_component(component_id)

-            end_time = datetime.now().astimezone()
-
            tool_component = self.create_tool_component(
                component_id=component_id,
                hash_id=hash_id,
@@ -120,7 +116,6 @@ class ToolTracerMixin:
                version=version,
                memory_used=0,
                start_time=start_time,
-                end_time=end_time,
                input_data=self._sanitize_input(args, kwargs),
                output_data=None,
                error=error_component
@@ -144,7 +139,6 @@ class ToolTracerMixin:
            result = await func(*args, **kwargs)

            # Calculate resource usage
-            end_time = datetime.now().astimezone()
            end_memory = psutil.Process().memory_info().rss
            memory_used = max(0, end_memory - start_memory)

@@ -156,7 +150,6 @@ class ToolTracerMixin:
                tool_type=tool_type,
                version=version,
                start_time=start_time,
-                end_time=end_time,
                memory_used=memory_used,
                input_data=self._sanitize_input(args, kwargs),
                output_data=self._sanitize_output(result)
@@ -172,8 +165,6 @@ class ToolTracerMixin:
                "details": {}
            }

-            end_time = datetime.now().astimezone()
-
            tool_component = self.create_tool_component(
                component_id=component_id,
                hash_id=hash_id,
@@ -181,7 +172,6 @@ class ToolTracerMixin:
                tool_type=tool_type,
                version=version,
                start_time=start_time,
-                end_time=end_time,
                memory_used=0,
                input_data=self._sanitize_input(args, kwargs),
                output_data=None,
@@ -202,7 +192,7 @@ class ToolTracerMixin:
            "type": "tool",
            "name": kwargs["name"],
            "start_time": start_time.isoformat(),
-            "end_time":
+            "end_time": datetime.now().astimezone().isoformat(),
            "error": kwargs.get("error"),
            "parent_id": self.current_agent_id.get(),
            "info": {
ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py
CHANGED
@@ -3,6 +3,7 @@ import requests
 import json
 import os
 import logging
+from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
 logger = logging.getLogger(__name__)

 def upload_code(hash_id, zip_path, project_name, dataset_name):
@@ -26,7 +27,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name):

    try:
        response = requests.request("GET",
-            f"{
+            f"{RagaAICatalyst.BASE_URL}/v2/llm/dataset/code?datasetName={dataset_name}",
            headers=headers,
            data=payload,
            timeout=99999)
@@ -54,7 +55,7 @@ def _fetch_presigned_url(project_name, dataset_name):

    try:
        response = requests.request("GET",
-            f"{
+            f"{RagaAICatalyst.BASE_URL}/v1/llm/presigned-url",
            headers=headers,
            data=payload,
            timeout=99999)
@@ -102,7 +103,7 @@ def _insert_code(dataset_name, hash_id, presigned_url, project_name):

    try:
        response = requests.request("POST",
-            f"{
+            f"{RagaAICatalyst.BASE_URL}/v2/llm/dataset/code",
            headers=headers,
            data=payload,
            timeout=99999)
ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py
CHANGED
@@ -26,6 +26,17 @@ def extract_model_name(args, kwargs, result):
    # Try model attribute
    elif hasattr(instance, "model"):
        model = instance.model
+
+    # Handle vertex ai case
+    if not model:
+        manager = kwargs.get("run_manager", None)
+        if manager:
+            if hasattr(manager, 'metadata'):
+                metadata = manager.metadata
+                model_name = metadata.get('ls_model_name', None)
+                if model_name:
+                    model = model_name
+

    # Normalize Google model names
    if model and isinstance(model, str):
@@ -98,6 +109,30 @@ def extract_token_usage(result):
            "total_tokens": getattr(metadata, "total_token_count", 0)
        }

+    # Handle ChatResult format with generations
+    if hasattr(result, "generations") and result.generations:
+        # Get the first generation
+        generation = result.generations[0]
+
+        # Try to get usage from generation_info
+        if hasattr(generation, "generation_info"):
+            metadata = generation.generation_info.get("usage_metadata", {})
+            if metadata:
+                return {
+                    "prompt_tokens": metadata.get("prompt_token_count", 0),
+                    "completion_tokens": metadata.get("candidates_token_count", 0),
+                    "total_tokens": metadata.get("total_token_count", 0)
+                }
+
+        # Try to get usage from message's usage_metadata
+        if hasattr(generation, "message") and hasattr(generation.message, "usage_metadata"):
+            metadata = generation.message.usage_metadata
+            return {
+                "prompt_tokens": metadata.get("input_tokens", 0),
+                "completion_tokens": metadata.get("output_tokens", 0),
+                "total_tokens": metadata.get("total_tokens", 0)
+            }
+
    # Handle Vertex AI format
    if hasattr(result, "text"):
        # For LangChain ChatVertexAI
@@ -194,7 +229,7 @@ def extract_llm_output(result):
    else:
        # We're in an async context, but this function is called synchronously
        # Return a placeholder and let the caller handle the coroutine
-        return OutputResponse("Coroutine result pending")
+        return OutputResponse([{'content': "Coroutine result pending", "role": "assistant"}])

    # Handle Google GenerativeAI format
    if hasattr(result, "result"):
@@ -213,11 +248,23 @@ def extract_llm_output(result):
        return OutputResponse(output)

    # Handle Vertex AI format
+    # format1
    if hasattr(result, "text"):
        return OutputResponse([{
            "content": result.text,
            "role": "assistant"
        }])
+
+
+    # format2
+    if hasattr(result, "generations"):
+        output = []
+        for generation in result.generations:
+            output.append({
+                "content": generation.text,
+                "role": "assistant"
+            })
+        return OutputResponse(output)

    # Handle OpenAI format
    if hasattr(result, "choices"):
@@ -225,16 +272,17 @@ def extract_llm_output(result):
            "content": choice.message.content,
            "role": choice.message.role
        } for choice in result.choices])
-
+
+
    # Handle Anthropic format
-    if hasattr(result, "
+    if hasattr(result, "content"):
        return OutputResponse([{
-            "content": result.
+            "content": result.content[0].text,
            "role": "assistant"
        }])

    # Default case
-    return OutputResponse(
+    return OutputResponse([{'content': result, 'role': 'assistant'}])


 def extract_llm_data(args, kwargs, result):
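extract_token_usage now also understands LangChain-style ChatResult objects: it looks for usage metadata first in generation_info and then on the generation's message. A hedged re-statement of that fallback chain with plain stand-in objects, so the flow can be followed without LangChain installed:

    from types import SimpleNamespace

    def usage_from_chat_result(result):
        """Mirror of the new fallback chain, using duck typing like the tracer does."""
        if hasattr(result, "generations") and result.generations:
            generation = result.generations[0]
            info = getattr(generation, "generation_info", None) or {}
            usage = info.get("usage_metadata", {})
            if usage:
                return {"prompt_tokens": usage.get("prompt_token_count", 0),
                        "completion_tokens": usage.get("candidates_token_count", 0),
                        "total_tokens": usage.get("total_token_count", 0)}
            message = getattr(generation, "message", None)
            if message is not None and hasattr(message, "usage_metadata"):
                usage = message.usage_metadata
                return {"prompt_tokens": usage.get("input_tokens", 0),
                        "completion_tokens": usage.get("output_tokens", 0),
                        "total_tokens": usage.get("total_tokens", 0)}
        return {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0}

    # Fake ChatResult-shaped object for illustration only.
    fake = SimpleNamespace(generations=[SimpleNamespace(
        generation_info={"usage_metadata": {"prompt_token_count": 12,
                                            "candidates_token_count": 30,
                                            "total_token_count": 42}})])
    print(usage_from_chat_result(fake))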
ragaai_catalyst/tracers/exporters/raga_exporter.py
CHANGED
@@ -55,11 +55,7 @@ class RagaExporter:
        """
        self.project_name = project_name
        self.dataset_name = dataset_name
-        RagaExporter.BASE_URL = (
-            os.getenv("RAGAAI_CATALYST_BASE_URL")
-            if os.getenv("RAGAAI_CATALYST_BASE_URL")
-            else "https://catalyst.raga.ai/api"
-        )
+        RagaExporter.BASE_URL = RagaAICatalyst.BASE_URL
        self.access_key = os.getenv("RAGAAI_CATALYST_ACCESS_KEY")
        self.secret_key = os.getenv("RAGAAI_CATALYST_SECRET_KEY")
        self.max_urls = 20
ragaai_catalyst/tracers/tracer.py
CHANGED
@@ -63,7 +63,7 @@ class Tracer(AgenticTracing):
        super().__init__(user_detail=user_detail, auto_instrument_llm=True)
        self.is_active = True
        self.project_name = project_name
-        self.dataset_name
+        # Removed self.dataset_name assignment as it's handled by parent class
        self.tracer_type = tracer_type
        self.metadata = self._improve_metadata(metadata, tracer_type)
        # self.metadata["total_cost"] = 0.0
@@ -288,20 +288,22 @@ class Tracer(AgenticTracing):
        # Note: We're not resetting all attributes here to allow for upload status checking

    def _pass_user_data(self):
-
+        user_detail = {
+            "project_name":self.project_name,
+            "project_id": self.project_id,
+            "dataset_name":self.dataset_name,
+            "trace_user_detail" : {
                "project_id": self.project_id,
-                "
-                "
-
-
-
-
-                "
-                "
-                "
-                "llm_model": self.pipeline["llm_model"],
-                "vector_store": self.pipeline["vector_store"],
-                "embed_model": self.pipeline["embed_model"]
-            }
+                "trace_id": "",
+                "session_id": None,
+                "trace_type": self.tracer_type,
+                "traces": [],
+                "metadata": self.metadata,
+                "pipeline": {
+                    "llm_model": (getattr(self, "pipeline", {}) or {}).get("llm_model", ""),
+                    "vector_store": (getattr(self, "pipeline", {}) or {}).get("vector_store", ""),
+                    "embed_model": (getattr(self, "pipeline", {}) or {}).get("embed_model", "")
                }
-        }
+            }
+        }
+        return user_detail
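_pass_user_data now nests the per-trace fields under trace_user_detail and guards the pipeline lookups with (getattr(self, "pipeline", {}) or {}).get(...), so a tracer with no pipeline configured yields empty strings instead of raising. A tiny illustration of why that guard matters:

    class T:
        pipeline = None  # e.g. a tracer created without pipeline settings

    t = T()
    # t.pipeline["llm_model"] would raise TypeError here; the guarded form degrades gracefully:
    llm_model = (getattr(t, "pipeline", {}) or {}).get("llm_model", "")
    print(repr(llm_model))  # ''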
{ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ragaai_catalyst
-Version: 2.1.1b5
+Version: 2.1.2
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>
 Requires-Python: <3.13,>=3.9
@@ -19,7 +19,7 @@ Requires-Dist: opentelemetry-instrumentation-openai~=0.24.0
 Requires-Dist: langchain-core>=0.2.11
 Requires-Dist: langchain>=0.2.11
 Requires-Dist: openai>=1.57.0
-Requires-Dist: pandas
+Requires-Dist: pandas
 Requires-Dist: groq>=0.11.0
 Requires-Dist: PyPDF2>=3.0.1
 Requires-Dist: google-generativeai>=0.8.2
{ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/RECORD
CHANGED
@@ -1,8 +1,8 @@
 ragaai_catalyst/__init__.py,sha256=BdIJ_UUre0uEnRTsLw_hE0C0muWk6XWNZqdVOel22R4,537
 ragaai_catalyst/_version.py,sha256=JKt9KaVNOMVeGs8ojO6LvIZr7ZkMzNN-gCcvryy4x8E,460
-ragaai_catalyst/dataset.py,sha256=
+ragaai_catalyst/dataset.py,sha256=aTRvZicAXmrC0KdmmsoJH_rtEJrxbqYmf1P806c1Wg0,10521
 ragaai_catalyst/evaluation.py,sha256=34H2bYZNSrcu0jMQgDZw1OLVbQU80PaVLo2avju8POM,20311
-ragaai_catalyst/experiment.py,sha256=
+ragaai_catalyst/experiment.py,sha256=8yQo1phCHlpnJ-4CqCaIbLXg_1ZlAuLGI9kqGBl-OTE,18859
 ragaai_catalyst/guard_executor.py,sha256=llPbE3DyVtrybojXknzBZj8-dtUrGBQwi9-ZiPJxGRo,3762
 ragaai_catalyst/guardrails_manager.py,sha256=DILMOAASK57FH9BLq_8yC1AQzRJ8McMFLwCXgYwNAd4,11904
 ragaai_catalyst/internal_api_completion.py,sha256=DdICI5yfEudiOAIC8L4oxH0Qz7kX-BZCdo9IWsi2gNo,2965
@@ -13,12 +13,12 @@ ragaai_catalyst/synthetic_data_generation.py,sha256=uDV9tNwto2xSkWg5XHXUvjErW-4P
 ragaai_catalyst/utils.py,sha256=TlhEFwLyRU690HvANbyoRycR3nQ67lxVUQoUOfTPYQ0,3772
 ragaai_catalyst/tracers/__init__.py,sha256=yxepo7iVjTNI_wFdk3Z6Ghu64SazVyszCPEHYrX5WQk,50
 ragaai_catalyst/tracers/llamaindex_callback.py,sha256=vPE7MieKjfwLrLUnnPs20Df0xNYqoCCj-Mt2NbiuiKU,14023
-ragaai_catalyst/tracers/tracer.py,sha256=
+ragaai_catalyst/tracers/tracer.py,sha256=Hg4S0ZZ-YZsVzxx68b8DdKAwqxVtIA7u0g5mcCBfDxw,12754
 ragaai_catalyst/tracers/upload_traces.py,sha256=hs0PEmit3n3_uUqrdbwcBdyK5Nbkik3JQVwJMEwYTd4,4796
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/data/data_structure.py,sha256=SYRDLROjdS8FR1J_1o2h5Ydv92kcnDs_powrgFaMjaY,8235
 ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb,sha256=0qZxjWqYCTAVvdo3Tsp544D8Am48wfeMQ9RKpKgAL8g,34291
 ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb,sha256=QCMFJYbGX0fd9eMW4PqyQLZjyWuTXo7n1nqO_hMLf0s,4225
 ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb,sha256=fU3inXoemJbdTkGAQl_N1UwVEZ10LrKv4gCEpbQ4ISg,43481
@@ -26,35 +26,35 @@ ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py,sha256=47DEQpj8HBSa-_T
 ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py,sha256=S4rCcKzU_5SB62BYEbNn_1VbbTdG4396N8rdZ3ZNGcE,5654
 ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py,sha256=Xk1cLzs-2A3dgyBwRRnCWs7Eubki40FVonwd433hPN8,4805
 ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py,sha256=FOKJUMd48s8NFoTiZINeWBPA4IjMo5o1r9nuDEzIerA,20945
+ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=AzfzKPnLHFbkOTXxiaA-vYzXPA1WyN-pUX624WcWPXg,14387
+ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=EWcse5vJdOO5a3uSuEL6TKRsnvKIW_H9qbUVgRTxq1M,25449
+ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=tdwkpWx_eSyXtbpWTo9RG_cF_6q1F8oB0XyF4OiRKh8,10363
 ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py,sha256=6FTA15xMnum9omM_0Jd9cMIuWdKu1gR5Tc8fOXAkP8E,10068
-ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=Rln82_4BEL_4ULOIhWwu2-qGQ8J1yEaf9qKsEBuj76o,8427
 ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py,sha256=wsCwTK7tM_L3mdNrcg5Mq3D1k07XCHZkhOB26kz_rLY,1472
 ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=2Ab8odZXMpYFTh8a7tO53qx9RptF-xxyfgtFN0A6GzI,7690
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=HgpMgI-JTWZrizcM7GGUIaAgaZF4aRT3D0dJXVEkblY,4271
 ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=XdB3X_ufe4RVvGorxSqAiB9dYv4UD7Hvvuw3bsDUppY,60
 ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py,sha256=JyNCbfpW-w4O9CjtemTqmor2Rh1WGpQwhRaDSRmBxw8,689
 ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py,sha256=515NNDQJTyy3O-2rdlUYUoWL9qSwLIfvV3sMB9BtHp8,1366
 ragaai_catalyst/tracers/agentic_tracing/utils/generic.py,sha256=WwXT01xmp8MSr7KinuDCSK9a1ifpLcT7ajFkvYviG_A,1190
-ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=F9J2K5RoHGploox51fxut0RI5KBPIuUhoJyLT0Lhv3Y,14085
 ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=6wnDtkBH-uwJeZm9FtyeXuUWux8u-skT3lmrtFwsReI,286298
 ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=9cFzfFqIA968bUG7LNTjdN7zbdEXUtcvRKg883ade2c,2586
 ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=DQHjcEuqEKsNSWaNs7SoOaq50yK4Jsl966S7mBnV-zA,5723
 ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=faFat_OAUnVJGnauMVo6yeHhTv-_njgyXGOtUwYJ8kE,7568
 ragaai_catalyst/tracers/exporters/__init__.py,sha256=kVA8zp05h3phu4e-iHSlnznp_PzMRczB7LphSsZgUjg,138
 ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=RgGteu-NVGprXKkynvyIO5yOjpbtA41R3W_NzCjnkwE,6445
-ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=
+ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=6xvjWXyh8XPkHKSLLmAZUQSvwuyY17ov8pv2VdfI0qA,17875
 ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=FgnMQupoRTzmVsG9YKsLQera2Pfs-AluZv8CxwavoyQ,253
 ragaai_catalyst/tracers/instrumentators/langchain.py,sha256=yMN0qVF0pUVk6R5M1vJoUXezDo1ejs4klCFRlE8x4vE,574
 ragaai_catalyst/tracers/instrumentators/llamaindex.py,sha256=SMrRlR4xM7k9HK43hakE8rkrWHxMlmtmWD-AX6TeByc,416
 ragaai_catalyst/tracers/instrumentators/openai.py,sha256=14R4KW9wQCR1xysLfsP_nxS7cqXrTPoD8En4MBAaZUU,379
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
 ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
+ragaai_catalyst-2.1.2.dist-info/METADATA,sha256=3OyLAtqZ_Om8WkF7SAQsmhF1CRAOeWAo519j1M9zboo,1802
+ragaai_catalyst-2.1.2.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ragaai_catalyst-2.1.2.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.1.2.dist-info/RECORD,,
{ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/WHEEL
File without changes
{ragaai_catalyst-2.1.1b5.dist-info → ragaai_catalyst-2.1.2.dist-info}/top_level.txt
File without changes