ragaai-catalyst 2.1b0__py3-none-any.whl → 2.1b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/__init__.py +1 -0
- ragaai_catalyst/dataset.py +1 -4
- ragaai_catalyst/evaluation.py +4 -5
- ragaai_catalyst/guard_executor.py +97 -0
- ragaai_catalyst/guardrails_manager.py +41 -15
- ragaai_catalyst/internal_api_completion.py +1 -1
- ragaai_catalyst/prompt_manager.py +7 -2
- ragaai_catalyst/ragaai_catalyst.py +1 -1
- ragaai_catalyst/synthetic_data_generation.py +7 -0
- ragaai_catalyst/tracers/__init__.py +1 -1
- ragaai_catalyst/tracers/agentic_tracing/__init__.py +3 -0
- ragaai_catalyst/tracers/agentic_tracing/agent_tracer.py +422 -0
- ragaai_catalyst/tracers/agentic_tracing/agentic_tracing.py +198 -0
- ragaai_catalyst/tracers/agentic_tracing/base.py +376 -0
- ragaai_catalyst/tracers/agentic_tracing/data_structure.py +248 -0
- ragaai_catalyst/tracers/agentic_tracing/examples/FinancialAnalysisSystem.ipynb +536 -0
- ragaai_catalyst/tracers/agentic_tracing/examples/GameActivityEventPlanner.ipynb +134 -0
- ragaai_catalyst/tracers/agentic_tracing/examples/TravelPlanner.ipynb +563 -0
- ragaai_catalyst/tracers/agentic_tracing/file_name_tracker.py +46 -0
- ragaai_catalyst/tracers/agentic_tracing/llm_tracer.py +808 -0
- ragaai_catalyst/tracers/agentic_tracing/network_tracer.py +286 -0
- ragaai_catalyst/tracers/agentic_tracing/sample.py +197 -0
- ragaai_catalyst/tracers/agentic_tracing/tool_tracer.py +247 -0
- ragaai_catalyst/tracers/agentic_tracing/unique_decorator.py +165 -0
- ragaai_catalyst/tracers/agentic_tracing/unique_decorator_test.py +172 -0
- ragaai_catalyst/tracers/agentic_tracing/upload_agentic_traces.py +187 -0
- ragaai_catalyst/tracers/agentic_tracing/upload_code.py +115 -0
- ragaai_catalyst/tracers/agentic_tracing/user_interaction_tracer.py +43 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +3 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +18 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/data_classes.py +61 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +32 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py +177 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +7823 -0
- ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py +74 -0
- ragaai_catalyst/tracers/agentic_tracing/zip_list_of_unique_files.py +342 -0
- ragaai_catalyst/tracers/exporters/raga_exporter.py +1 -7
- ragaai_catalyst/tracers/tracer.py +30 -4
- ragaai_catalyst/tracers/upload_traces.py +127 -0
- ragaai_catalyst-2.1b1.dist-info/METADATA +43 -0
- ragaai_catalyst-2.1b1.dist-info/RECORD +56 -0
- {ragaai_catalyst-2.1b0.dist-info → ragaai_catalyst-2.1b1.dist-info}/WHEEL +1 -1
- ragaai_catalyst-2.1b0.dist-info/METADATA +0 -295
- ragaai_catalyst-2.1b0.dist-info/RECORD +0 -28
- {ragaai_catalyst-2.1b0.dist-info → ragaai_catalyst-2.1b1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,376 @@
|
|
1
|
+
import json
|
2
|
+
import os
|
3
|
+
import platform
|
4
|
+
import re
|
5
|
+
import psutil
|
6
|
+
import pkg_resources
|
7
|
+
from datetime import datetime
|
8
|
+
from pathlib import Path
|
9
|
+
from typing import Optional, Dict, Any, List
|
10
|
+
import uuid
|
11
|
+
import sys
|
12
|
+
|
13
|
+
from .data_structure import (
|
14
|
+
Trace, Metadata, SystemInfo, OSInfo, EnvironmentInfo,
|
15
|
+
Resources, CPUResource, MemoryResource, DiskResource, NetworkResource,
|
16
|
+
ResourceInfo, MemoryInfo, DiskInfo, NetworkInfo,
|
17
|
+
Component, LLMComponent, AgentComponent, ToolComponent,
|
18
|
+
NetworkCall, Interaction, Error
|
19
|
+
)
|
20
|
+
|
21
|
+
from ..upload_traces import UploadTraces
|
22
|
+
from .upload_agentic_traces import UploadAgenticTraces
|
23
|
+
from .upload_code import upload_code
|
24
|
+
from ...ragaai_catalyst import RagaAICatalyst
|
25
|
+
|
26
|
+
from .file_name_tracker import TrackName
|
27
|
+
from .zip_list_of_unique_files import zip_list_of_unique_files
|
28
|
+
|
29
|
+
class TracerJSONEncoder(json.JSONEncoder):
    """JSON encoder tolerant of the non-JSON types found in trace objects.

    Handles datetimes (ISO-8601), bytes (UTF-8 decode with repr fallback),
    objects exposing to_dict(), and plain objects (public, non-None attributes
    only). Anything else is stringified; as a last resort it becomes null
    rather than aborting the dump.
    """

    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, bytes):
            try:
                return obj.decode('utf-8')
            except UnicodeDecodeError:
                return str(obj)  # Fallback to string representation
        if hasattr(obj, 'to_dict'):  # Handle objects with to_dict method
            return obj.to_dict()
        if hasattr(obj, '__dict__'):
            # Filter out None values and private attributes before serializing
            return {k: v for k, v in obj.__dict__.items()
                    if v is not None and not k.startswith('_')}
        try:
            # Try to convert to a basic type
            return str(obj)
        except Exception:
            # Fix: was a bare `except:`, which would also swallow
            # KeyboardInterrupt/SystemExit; Exception is the widest net wanted.
            return None  # Last resort: return None instead of failing
|
49
|
+
|
50
|
+
class BaseTracer:
    """Collects trace spans, enriches them with system/resource metadata,
    writes the finished trace to ./traces/<trace_id>.json, and uploads the
    trace plus a zip of the traced source files to RagaAI Catalyst.

    Also usable as a context manager: start() on enter, stop() on exit.
    """

    def __init__(self, user_details):
        # user_details must provide 'project_name', 'dataset_name' and 'project_id'.
        self.user_details = user_details
        self.project_name = self.user_details['project_name'] # Access the project_name
        self.dataset_name = self.user_details['dataset_name'] # Access the dataset_name
        self.project_id = self.user_details['project_id'] # Access the project_id

        # Initialize trace data
        self.trace_id = None
        self.start_time = None
        self.components: List[Component] = []
        self.file_tracker = TrackName()

    def _get_system_info(self) -> SystemInfo:
        """Snapshot OS and Python-environment details for trace metadata."""
        # Get OS info
        os_info = OSInfo(
            name=platform.system(),
            version=platform.version(),
            platform=platform.machine(),
            kernel_version=platform.release()
        )

        # Get Python environment info (full installed-package list)
        installed_packages = [f"{pkg.key}=={pkg.version}" for pkg in pkg_resources.working_set]
        env_info = EnvironmentInfo(
            name="Python",
            version=platform.python_version(),
            packages=installed_packages,
            env_path=sys.prefix,
            command_to_run=f"python {sys.argv[0]}"
        )

        # NOTE(review): when called from start(), self.trace_id has not been
        # refreshed yet (it is None on the first run), so this id can be
        # "sys_None" — confirm whether the id should use the new trace id.
        return SystemInfo(
            id=f"sys_{self.trace_id}",
            os=os_info,
            environment=env_info,
            source_code="Path to the source code .zip file in format hashid.zip" # TODO: Implement source code archiving
        )

    def _get_resources(self) -> Resources:
        """Take a single point-in-time sample of CPU/memory/disk/network usage."""
        # CPU info
        cpu_info = ResourceInfo(
            name=platform.processor(),
            cores=psutil.cpu_count(logical=False),
            threads=psutil.cpu_count(logical=True)
        )
        cpu = CPUResource(
            info=cpu_info,
            interval="5s",
            values=[psutil.cpu_percent()]
        )

        # Memory info
        memory = psutil.virtual_memory()
        mem_info = MemoryInfo(
            total=memory.total / (1024**3), # Convert to GB
            free=memory.available / (1024**3)
        )
        mem = MemoryResource(
            info=mem_info,
            interval="5s",
            values=[memory.percent]
        )

        # Disk info (usage of the root filesystem; cumulative I/O counters)
        disk = psutil.disk_usage('/')
        disk_info = DiskInfo(
            total=disk.total / (1024**3),
            free=disk.free / (1024**3)
        )
        disk_io = psutil.disk_io_counters()
        disk_resource = DiskResource(
            info=disk_info,
            interval="5s",
            read=[disk_io.read_bytes / (1024**2)], # MB
            write=[disk_io.write_bytes / (1024**2)]
        )

        # Network info (cumulative byte counters since boot, reported in MB)
        net_io = psutil.net_io_counters()
        net_info = NetworkInfo(
            upload_speed=net_io.bytes_sent / (1024**2), # MB
            download_speed=net_io.bytes_recv / (1024**2)
        )
        net = NetworkResource(
            info=net_info,
            interval="5s",
            uploads=[net_io.bytes_sent / (1024**2)],
            downloads=[net_io.bytes_recv / (1024**2)]
        )

        return Resources(cpu=cpu, memory=mem, disk=disk_resource, network=net)

    def start(self):
        """Initialize a new trace"""
        metadata = Metadata(
            cost={},
            tokens={},
            system_info=self._get_system_info(),
            resources=self._get_resources()
        )

        # Generate a unique trace ID, when trace starts
        self.trace_id = str(uuid.uuid4())

        # Get the start time
        self.start_time = datetime.now().isoformat()

        # Single data segment; "spans" aliases self.components, so spans added
        # later via add_component() are reflected here automatically.
        self.data_key = [{"start_time": datetime.now().isoformat(),
                        "end_time": "",
                        "spans": self.components
                        }]

        self.trace = Trace(
            id=self.trace_id,
            project_name=self.project_name,
            start_time=datetime.now().isoformat(),
            end_time="", # Will be set when trace is stopped
            metadata=metadata,
            data=self.data_key,
            replays={"source": None}
        )

    def stop(self):
        """Stop the trace and save to JSON file"""
        if hasattr(self, 'trace'):
            self.trace.data[0]["end_time"] = datetime.now().isoformat()
            self.trace.end_time = datetime.now().isoformat()

            # Change span ids to int
            self.trace = self._change_span_ids_to_int(self.trace)
            self.trace = self._change_agent_input_output(self.trace)
            self.trace = self._extract_cost_tokens(self.trace)

            # Create traces directory if it doesn't exist
            self.traces_dir = Path("traces")
            self.traces_dir.mkdir(exist_ok=True)
            filename = self.trace.id + ".json"
            filepath = self.traces_dir / filename

            #get unique files and zip it. Generate a unique hash ID for the contents of the files
            list_of_unique_files = self.file_tracker.get_unique_files()
            hash_id, zip_path = zip_list_of_unique_files(list_of_unique_files)

            #replace source code with zip_path
            self.trace.metadata.system_info.source_code = hash_id

            # Clean up trace_data before saving (drop spans without hash ids,
            # dedupe repeated LLM spans)
            trace_data = self.trace.__dict__
            cleaned_trace_data = self._clean_trace(trace_data)

            with open(filepath, 'w') as f:
                json.dump(cleaned_trace_data, f, cls=TracerJSONEncoder, indent=2)

            print(f"Trace saved to {filepath}")
            # Upload traces (base URL taken from RAGAAI_CATALYST_BASE_URL)
            json_file_path = str(filepath)
            project_name = self.project_name
            project_id = self.project_id
            dataset_name = self.dataset_name
            user_detail = self.user_details
            base_url = os.getenv('RAGAAI_CATALYST_BASE_URL')
            upload_traces = UploadAgenticTraces(
                json_file_path=json_file_path,
                project_name=project_name,
                project_id=project_id,
                dataset_name=dataset_name,
                user_detail=user_detail,
                base_url=base_url
            )
            upload_traces.upload_agentic_traces()

            #Upload Codehash (the zip of source files identified above)
            response = upload_code(
                hash_id=hash_id,
                zip_path=zip_path,
                project_name=project_name,
                dataset_name=dataset_name
            )
            print(response)

        # Cleanup: reset per-trace state so the tracer instance can be reused
        self.components = []
        self.file_tracker = TrackName()

    def add_component(self, component: Component):
        """Add a component to the trace"""
        self.components.append(component)

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.stop()

    def _change_span_ids_to_int(self, trace):
        """Re-number spans (and agent children) with sequential integer ids;
        top-level spans get parent_id 0, children get their agent's id."""
        id, parent_id = 1, 0  # NOTE: `id` shadows the builtin within this method
        for span in trace.data[0]["spans"]:
            span.id = id
            span.parent_id = parent_id
            id += 1
            if span.type=="agent":
                for children in span.data["children"]:
                    children["id"] = id
                    children["parent_id"] = span.id
                    id += 1
        return trace

    def _change_agent_input_output(self, trace):
        """For agent spans, set input from the first child with a non-empty
        input and output from the last child with a non-empty output."""
        for span in trace.data[0]["spans"]:
            if span.type == "agent":
                childrens = span.data["children"]
                span.data["input"] = None
                span.data["output"] = None
                if childrens:
                    # Find first non-null input going forward
                    for child in childrens:
                        if "data" not in child:
                            continue
                        input_data = child["data"].get("input")

                        if input_data:
                            span.data["input"] = input_data['args'] if hasattr(input_data, 'args') else input_data
                            break

                    # Find first non-null output going backward
                    for child in reversed(childrens):
                        if "data" not in child:
                            continue
                        output_data = child["data"].get("output")

                        if output_data and output_data != "" and output_data != "None":
                            span.data["output"] = output_data
                            break
        return trace

    def _extract_cost_tokens(self, trace):
        """Sum per-key cost and token counts across all LLM spans (including
        LLM children of agent spans) into trace.metadata."""
        cost = {}
        tokens = {}
        for span in trace.data[0]["spans"]:
            if span.type == "llm":
                info = span.info
                if isinstance(info, dict):
                    cost_info = info.get('cost', {})
                    for key, value in cost_info.items():
                        if key not in cost:
                            cost[key] = 0
                        cost[key] += value
                    token_info = info.get('tokens', {})
                    for key, value in token_info.items():
                        if key not in tokens:
                            tokens[key] = 0
                        tokens[key] += value
            if span.type == "agent":
                for children in span.data["children"]:
                    if 'type' not in children:
                        continue
                    if children["type"] != "llm":
                        continue
                    info = children["info"]
                    if isinstance(info, dict):
                        cost_info = info.get('cost', {})
                        for key, value in cost_info.items():
                            if key not in cost:
                                cost[key] = 0
                            cost[key] += value
                        token_info = info.get('tokens', {})
                        for key, value in token_info.items():
                            if key not in tokens:
                                tokens[key] = 0
                            tokens[key] += value
        trace.metadata.cost = cost
        trace.metadata.tokens = tokens
        return trace

    def _clean_trace(self, trace):
        """Drop spans lacking a hash_id and deduplicate LLM spans (same
        hash_id + input + output), recursing into agent children.

        `trace` here is the trace's __dict__, not a Trace instance.
        """
        # Convert span to dict if it has to_dict method
        def _to_dict_if_needed(obj):
            if hasattr(obj, 'to_dict'):
                return obj.to_dict()
            return obj

        def deduplicate_spans(spans):
            seen_llm_spans = {}  # Dictionary to track unique LLM spans
            unique_spans = []

            for span in spans:
                # Convert span to dictionary if needed
                span_dict = _to_dict_if_needed(span)

                # Skip spans without hash_id
                if 'hash_id' not in span_dict:
                    continue

                if span_dict.get('type') == 'llm':
                    # Create a unique key based on hash_id, input, and output
                    span_key = (
                        span_dict.get('hash_id'),
                        str(span_dict.get('data', {}).get('input')),
                        str(span_dict.get('data', {}).get('output'))
                    )

                    if span_key not in seen_llm_spans:
                        seen_llm_spans[span_key] = True
                        unique_spans.append(span)
                else:
                    # For non-LLM spans, process their children if they exist
                    if 'data' in span_dict and 'children' in span_dict['data']:
                        children = span_dict['data']['children']
                        # Filter and deduplicate children
                        filtered_children = deduplicate_spans(children)
                        if isinstance(span, dict):
                            span['data']['children'] = filtered_children
                        else:
                            span.data['children'] = filtered_children
                    unique_spans.append(span)

            return unique_spans

        # Remove any spans without hash ids
        for data in trace.get('data', []):
            if 'spans' in data:
                # First filter out spans without hash_ids, then deduplicate
                data['spans'] = deduplicate_spans(data['spans'])

        return trace
|
@@ -0,0 +1,248 @@
|
|
1
|
+
from dataclasses import dataclass
|
2
|
+
from typing import List, Dict, Optional, Any, Union
|
3
|
+
from datetime import datetime
|
4
|
+
import uuid
|
5
|
+
|
6
|
+
@dataclass
class OSInfo:
    """Operating-system details recorded in a trace's system info."""
    name: str            # OS name (platform.system())
    version: str         # full OS version string
    platform: str        # machine/architecture identifier
    kernel_version: str  # kernel release string
|
12
|
+
|
13
|
+
@dataclass
class EnvironmentInfo:
    """Python runtime environment captured for a trace."""
    name: str            # environment name; set to "Python" by BaseTracer
    version: str         # interpreter version
    packages: List[str]  # installed packages as "name==version" strings
    env_path: str        # sys.prefix of the running interpreter
    command_to_run: str  # command line used to launch the traced script
|
20
|
+
|
21
|
+
@dataclass
class SystemInfo:
    """Host system snapshot attached to trace metadata."""
    id: str                       # "sys_<trace_id>" identifier
    os: OSInfo
    environment: EnvironmentInfo
    source_code: str              # later overwritten with the source-zip hash id by BaseTracer.stop()
|
27
|
+
|
28
|
+
@dataclass
class ResourceInfo:
    """Static CPU description (name, physical cores, logical threads)."""
    name: str
    cores: int
    threads: int
|
33
|
+
|
34
|
+
@dataclass
class CPUResource:
    """CPU usage samples plus static CPU info."""
    info: ResourceInfo
    interval: str        # sampling interval label, e.g. "5s"
    values: List[float]  # CPU utilization percentages
|
39
|
+
|
40
|
+
@dataclass
class MemoryInfo:
    """Total and available memory in GB (see BaseTracer._get_resources)."""
    total: float
    free: float
|
44
|
+
|
45
|
+
@dataclass
class MemoryResource:
    """Memory usage samples plus static memory info."""
    info: MemoryInfo
    interval: str        # sampling interval label, e.g. "5s"
    values: List[float]  # memory utilization percentages
|
50
|
+
|
51
|
+
@dataclass
class DiskInfo:
    """Total and free disk space in GB for the sampled filesystem."""
    total: float
    free: float
|
55
|
+
|
56
|
+
@dataclass
class DiskResource:
    """Disk usage info plus cumulative read/write counters (MB)."""
    info: DiskInfo
    interval: str       # sampling interval label, e.g. "5s"
    read: List[float]   # bytes read, reported in MB
    write: List[float]  # bytes written, reported in MB
|
62
|
+
|
63
|
+
@dataclass
class NetworkInfo:
    """Cumulative network byte counters in MB (named "speed" but filled with
    psutil totals by BaseTracer._get_resources)."""
    upload_speed: float
    download_speed: float
|
67
|
+
|
68
|
+
@dataclass
class NetworkResource:
    """Network usage samples plus summary info."""
    info: NetworkInfo
    interval: str            # sampling interval label, e.g. "5s"
    uploads: List[float]     # sent bytes samples, MB
    downloads: List[float]   # received bytes samples, MB
|
74
|
+
|
75
|
+
@dataclass
class Resources:
    """Bundle of all host resource snapshots attached to trace metadata."""
    cpu: CPUResource
    memory: MemoryResource
    disk: DiskResource
    network: NetworkResource
|
81
|
+
|
82
|
+
@dataclass
class Metadata:
    """Trace-level metadata: accumulated cost/token totals plus host info."""
    cost: Dict[str, Any]     # per-key cost sums (filled by BaseTracer._extract_cost_tokens)
    tokens: Dict[str, Any]   # per-key token sums
    system_info: SystemInfo
    resources: Resources
    # NOTE(review): all four fields are required, so `Metadata()` with no
    # arguments raises TypeError — yet Trace.__init__ falls back to exactly
    # that call when no metadata is supplied. Confirm intended defaults.
|
88
|
+
|
89
|
+
@dataclass
class NetworkCall:
    """One recorded HTTP/network request attached to a component."""
    url: str
    method: str
    status_code: int
    response_time: float
    bytes_sent: int
    bytes_received: int
    protocol: str
    connection_id: str
    parent_id: str              # id of the owning component
    request: Dict[str, Any]     # request payload/headers snapshot
    response: Dict[str, Any]    # response payload/headers snapshot
|
102
|
+
|
103
|
+
class Interaction:
    """A single recorded user/system interaction attached to a component."""

    def __init__(self, id, type: str, content: str, timestamp: str):
        self.id = id
        self.type = type
        self.content = content
        self.timestamp = timestamp

    def to_dict(self):
        """Serialize to the wire format; note `type` is emitted under the
        key "interaction_type"."""
        return dict(
            id=self.id,
            interaction_type=self.type,
            content=self.content,
            timestamp=self.timestamp,
        )
|
117
|
+
|
118
|
+
@dataclass
class Error:
    """Structured error information optionally attached to a component."""
    code: int
    type: str
    message: str
    details: Dict[str, Any]
|
124
|
+
|
125
|
+
@dataclass
class LLMParameters:
    """Sampling parameters of an LLM call."""
    temperature: float
    top_p: float
    max_tokens: int
|
130
|
+
|
131
|
+
@dataclass
class TokenUsage:
    """Token counts for one LLM call."""
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int
|
136
|
+
|
137
|
+
@dataclass
class Cost:
    """Monetary cost breakdown for one LLM call."""
    prompt_cost: float
    completion_cost: float
    total_cost: float
|
142
|
+
|
143
|
+
@dataclass
class LLMInfo:
    """Model identity, parameters, token usage and cost of one LLM call."""
    model: str
    parameters: LLMParameters
    token_usage: TokenUsage
    cost: Cost
|
149
|
+
|
150
|
+
@dataclass
class AgentInfo:
    """Descriptive info for an agent component."""
    agent_type: str
    version: str
    capabilities: List[str]
|
155
|
+
|
156
|
+
@dataclass
class ToolInfo:
    """Descriptive info for a tool component."""
    tool_type: str
    version: str
    memory_used: int
|
161
|
+
|
162
|
+
class Component:
    """Base record for one traced span (LLM call, agent run, tool call).

    Normalizes `interactions` on construction: dict entries are converted
    into Interaction objects (missing ids get a fresh uuid4, missing
    timestamps default to the current UTC time); Interaction instances are
    kept as-is.
    """

    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str, end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any], network_calls: Optional[List[NetworkCall]] = None, interactions: Optional[List[Union[Interaction, Dict]]] = None, error: Optional[Error] = None):
        self.id = id
        self.error = error
        self.hash_id = hash_id
        self.type = type  # "llm", "agent" or "tool" (see BaseTracer span handling)
        self.name = name
        self.start_time = start_time
        self.end_time = end_time
        self.parent_id = parent_id
        self.info = info
        self.data = data
        self.network_calls = network_calls or []
        self.interactions = []
        if interactions:
            for interaction in interactions:
                if isinstance(interaction, dict):
                    self.interactions.append(
                        Interaction(
                            id=interaction.get("id", str(uuid.uuid4())),
                            type=interaction.get("interaction_type", ""),
                            content=str(interaction.get("content", "")),
                            # NOTE(review): datetime.utcnow() is deprecated since
                            # Python 3.12; datetime.now(timezone.utc) is preferred.
                            timestamp=interaction.get("timestamp", datetime.utcnow().isoformat())
                        )
                    )
                else:
                    self.interactions.append(interaction)

    def to_dict(self):
        """Serialize for JSON output.

        NOTE(review): `error` is not included in the output dict — confirm
        whether non-None errors should be serialized too. Interaction objects
        are emitted as-is and rely on TracerJSONEncoder for serialization.
        """
        return {
            "id": self.id,
            "hash_id": self.hash_id,
            "type": self.type,
            "name": self.name,
            "start_time": self.start_time,
            "end_time": self.end_time,
            "parent_id": self.parent_id,
            "info": self.info,
            "data": self.data,
            "network_calls": [call.to_dict() if hasattr(call, 'to_dict') else call for call in self.network_calls],
            "interactions": self.interactions
        }
|
204
|
+
|
205
|
+
class LLMComponent(Component):
    """Component specialization for LLM spans; adds no behavior beyond Component."""

    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str,
                 end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any],
                 network_calls: Optional[List[NetworkCall]] = None,
                 interactions: Optional[List[Union[Interaction, Dict]]] = None):
        super().__init__(
            id=id, hash_id=hash_id, type=type, name=name,
            start_time=start_time, end_time=end_time, parent_id=parent_id,
            info=info, data=data, network_calls=network_calls,
            interactions=interactions,
        )
|
208
|
+
|
209
|
+
class AgentComponent(Component):
    """Component specialization for agent spans; adds no behavior beyond Component."""

    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str,
                 end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any],
                 network_calls: Optional[List[NetworkCall]] = None,
                 interactions: Optional[List[Union[Interaction, Dict]]] = None):
        super().__init__(
            id=id, hash_id=hash_id, type=type, name=name,
            start_time=start_time, end_time=end_time, parent_id=parent_id,
            info=info, data=data, network_calls=network_calls,
            interactions=interactions,
        )
|
212
|
+
|
213
|
+
class ToolComponent(Component):
    """Component specialization for tool spans; adds no behavior beyond Component."""

    def __init__(self, id: str, hash_id: str, type: str, name: str, start_time: str,
                 end_time: str, parent_id: int, info: Dict[str, Any], data: Dict[str, Any],
                 network_calls: Optional[List[NetworkCall]] = None,
                 interactions: Optional[List[Union[Interaction, Dict]]] = None):
        super().__init__(
            id=id, hash_id=hash_id, type=type, name=name,
            start_time=start_time, end_time=end_time, parent_id=parent_id,
            info=info, data=data, network_calls=network_calls,
            interactions=interactions,
        )
|
216
|
+
|
217
|
+
@dataclass
class ComponentInfo:
    """Union-style info bag covering tool, agent and LLM components; fields
    not applicable to a given component kind stay None."""
    tool_type: Optional[str] = None
    agent_type: Optional[str] = None
    version: str = ""
    capabilities: Optional[List[str]] = None
    memory_used: Optional[int] = None
    model: Optional[str] = None
    parameters: Optional[Dict[str, Any]] = None
    token_usage: Optional[Dict[str, int]] = None
    cost: Optional[Dict[str, float]] = None
|
228
|
+
|
229
|
+
class Trace:
    """Top-level trace record: identity, timing, metadata and span data,
    serialized to JSON by BaseTracer and uploaded to Catalyst."""

    def __init__(self, id: str, project_name: str, start_time: str, end_time: str, metadata: Optional["Metadata"] = None, data: Optional[List[Dict[str, Any]]] = None, replays: Optional[Dict[str, Any]] = None):
        self.id = id
        self.project_name = project_name
        self.start_time = start_time
        self.end_time = end_time
        # Fix: Metadata is a dataclass whose four fields are all required, so
        # the previous `metadata or Metadata()` raised TypeError whenever no
        # metadata was supplied. Build an explicitly-empty Metadata instead
        # (system_info/resources left None until filled in by the tracer).
        if metadata is None:
            metadata = Metadata(cost={}, tokens={}, system_info=None, resources=None)
        self.metadata = metadata
        self.data = data or []
        self.replays = replays

    def to_dict(self):
        """Return a JSON-serializable dict of this trace."""
        metadata = self.metadata
        if metadata is None:
            metadata_dict = None
        elif hasattr(metadata, 'to_dict'):
            metadata_dict = metadata.to_dict()
        else:
            # Fix: Metadata (a plain dataclass) has no to_dict(); serialize its
            # fields directly instead of raising AttributeError.
            metadata_dict = dict(metadata.__dict__)
        return {
            "id": self.id,
            "project_name": self.project_name,
            "start_time": self.start_time,
            "end_time": self.end_time,
            "metadata": metadata_dict,
            "data": self.data,
            "replays": self.replays,
        }
|