ragaai-catalyst 2.1.5b0__py3-none-any.whl → 2.1.5b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/dataset.py +330 -0
- ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +20 -4
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +36 -113
- ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +17 -2
- ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +52 -1
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +16 -19
- ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +37 -3
- ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json +17 -1
- ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py +215 -0
- ragaai_catalyst/tracers/distributed.py +46 -19
- ragaai_catalyst/tracers/tracer.py +2 -2
- {ragaai_catalyst-2.1.5b0.dist-info → ragaai_catalyst-2.1.5b2.dist-info}/METADATA +1 -1
- {ragaai_catalyst-2.1.5b0.dist-info → ragaai_catalyst-2.1.5b2.dist-info}/RECORD +16 -15
- {ragaai_catalyst-2.1.5b0.dist-info → ragaai_catalyst-2.1.5b2.dist-info}/LICENSE +0 -0
- {ragaai_catalyst-2.1.5b0.dist-info → ragaai_catalyst-2.1.5b2.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.1.5b0.dist-info → ragaai_catalyst-2.1.5b2.dist-info}/top_level.txt +0 -0
ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py

@@ -61,6 +61,7 @@ class LLMTracerMixin:
         # Add auto_instrument options
         self.auto_instrument_llm = False
         self.auto_instrument_user_interaction = False
+        self.auto_instrument_file_io = False
         self.auto_instrument_network = False
 
     def instrument_llm_calls(self):
@@ -77,6 +78,7 @@ class LLMTracerMixin:
 
         if "openai" in sys.modules:
             self.patch_openai_methods(sys.modules["openai"])
+            self.patch_openai_beta_methods(sys.modules["openai"])
         if "litellm" in sys.modules:
             self.patch_litellm_methods(sys.modules["litellm"])
         if "anthropic" in sys.modules:
@@ -96,6 +98,7 @@ class LLMTracerMixin:
             self.patch_vertex_ai_methods, "vertexai.generative_models"
         )
         wrapt.register_post_import_hook(self.patch_openai_methods, "openai")
+        wrapt.register_post_import_hook(self.patch_openai_beta_methods, "openai")
         wrapt.register_post_import_hook(self.patch_litellm_methods, "litellm")
         wrapt.register_post_import_hook(self.patch_anthropic_methods, "anthropic")
         wrapt.register_post_import_hook(
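For readers unfamiliar with wrapt, `register_post_import_hook` defers patching until the target library is actually imported, and fires immediately if it already is. A minimal, self-contained sketch of that mechanism, assuming `wrapt` and `openai` are installed; the hook name and the print are illustrative only:

```python
import wrapt

def on_openai_import(module):
    # Called once with the imported module object as soon as "openai"
    # is imported anywhere in the process.
    print(f"patching {module.__name__}")

wrapt.register_post_import_hook(on_openai_import, "openai")

import openai  # the hook fires here (or right away if openai was already imported)
```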
@@ -117,6 +120,10 @@ class LLMTracerMixin:
     def instrument_network_calls(self):
         """Enable network instrumentation for LLM calls"""
         self.auto_instrument_network = True
+
+    def instrument_file_io_calls(self):
+        """Enable file IO instrumentation for LLM calls"""
+        self.auto_instrument_file_io = True
 
     def patch_openai_methods(self, module):
         try:
@@ -130,6 +137,40 @@
             # Log the error but continue execution
             print(f"Warning: Failed to patch OpenAI methods: {str(e)}")
 
+    def patch_openai_beta_methods(self, openai_module):
+        """
+        Patch the new openai.beta endpoints (threads, runs, messages, etc.)
+        so that calls like openai.beta.threads.create(...) or
+        openai.beta.threads.runs.create(...) are automatically traced.
+        """
+        # Make sure openai_module has a 'beta' attribute
+        if not hasattr(openai_module, "beta"):
+            return
+
+        beta_module = openai_module.beta
+
+        # Patch openai.beta.threads
+        if hasattr(beta_module, "threads"):
+            threads_obj = beta_module.threads
+            # Patch top-level methods on openai.beta.threads
+            for method_name in ["create", "list"]:
+                if hasattr(threads_obj, method_name):
+                    self.wrap_method(threads_obj, method_name)
+
+            # Patch the nested objects: messages, runs
+            if hasattr(threads_obj, "messages"):
+                messages_obj = threads_obj.messages
+                for method_name in ["create", "list"]:
+                    if hasattr(messages_obj, method_name):
+                        self.wrap_method(messages_obj, method_name)
+
+            if hasattr(threads_obj, "runs"):
+                runs_obj = threads_obj.runs
+                for method_name in ["create", "retrieve", "list"]:
+                    if hasattr(runs_obj, method_name):
+                        self.wrap_method(runs_obj, method_name)
+
+
     def patch_anthropic_methods(self, module):
         if hasattr(module, "Anthropic"):
             client_class = getattr(module, "Anthropic")
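Illustrative effect of `patch_openai_beta_methods`: once `instrument_llm_calls()` has run, Assistants API calls made through the module-level `openai.beta` surface go through the wrapped methods. This is a hedged usage sketch, not package documentation; it assumes openai>=1.x with `OPENAI_API_KEY` set and uses a placeholder assistant id:

```python
import openai

thread = openai.beta.threads.create()  # wrapped via wrap_method above
openai.beta.threads.messages.create(
    thread_id=thread.id, role="user", content="hello"
)
run = openai.beta.threads.runs.create(
    thread_id=thread.id, assistant_id="asst_placeholder"
)
```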
@@ -334,7 +375,17 @@
 
         interactions = []
         if self.auto_instrument_user_interaction:
-
+            input_output_interactions = []
+            for interaction in self.component_user_interaction.get(component_id, []):
+                if interaction["interaction_type"] in ["input", "output"]:
+                    input_output_interactions.append(interaction)
+            interactions.extend(input_output_interactions)
+        if self.auto_instrument_file_io:
+            file_io_interactions = []
+            for interaction in self.component_user_interaction.get(component_id, []):
+                if interaction["interaction_type"] in ["file_read", "file_write"]:
+                    file_io_interactions.append(interaction)
+            interactions.extend(file_io_interactions)
 
         parameters_to_display = {}
         if "run_manager" in parameters:
ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py

@@ -84,27 +84,23 @@ class AgenticTracing(
             self.auto_instrument_custom = True
         else:
             # Set global active state
-            self.is_active = (
-                any(auto_instrumentation.values())
-                if isinstance(auto_instrumentation, dict)
-                else bool(auto_instrumentation)
-            )
+            self.is_active = True
 
             # Set individual components
             if isinstance(auto_instrumentation, dict):
-                self.auto_instrument_llm = auto_instrumentation.get("llm",
-                self.auto_instrument_tool = auto_instrumentation.get("tool",
-                self.auto_instrument_agent = auto_instrumentation.get("agent",
+                self.auto_instrument_llm = auto_instrumentation.get("llm", True)
+                self.auto_instrument_tool = auto_instrumentation.get("tool", True)
+                self.auto_instrument_agent = auto_instrumentation.get("agent", True)
                 self.auto_instrument_user_interaction = auto_instrumentation.get(
-                    "user_interaction",
+                    "user_interaction", True
                 )
                 self.auto_instrument_file_io = auto_instrumentation.get(
-                    "file_io",
+                    "file_io", True
                 )
                 self.auto_instrument_network = auto_instrumentation.get(
-                    "network",
+                    "network", True
                 )
-                self.auto_instrument_custom = auto_instrumentation.get("custom",
+                self.auto_instrument_custom = auto_instrumentation.get("custom", True)
             else:
                 # If boolean provided, apply to all components
                 self.auto_instrument_llm = bool(auto_instrumentation)
@@ -170,9 +166,6 @@
         self.user_interaction_tracer.trace_id.set(self.trace_id)
         self.user_interaction_tracer.tracer = self
         self.user_interaction_tracer.component_id.set(self.current_component_id.get())
-        builtins.print = self.user_interaction_tracer.traced_print
-        builtins.input = self.user_interaction_tracer.traced_input
-        builtins.open = self.user_interaction_tracer.traced_open
 
         # Start base tracer (includes system info and resource monitoring)
         super().start()
@@ -194,11 +187,12 @@
         self.instrument_custom_calls()
 
         if self.auto_instrument_user_interaction:
-
            ToolTracerMixin.instrument_user_interaction_calls(self)
            LLMTracerMixin.instrument_user_interaction_calls(self)
            AgentTracerMixin.instrument_user_interaction_calls(self)
            CustomTracerMixin.instrument_user_interaction_calls(self)
+           builtins.print = self.user_interaction_tracer.traced_print
+           builtins.input = self.user_interaction_tracer.traced_input
 
         if self.auto_instrument_network:
             ToolTracerMixin.instrument_network_calls(self)
@@ -206,9 +200,12 @@
             AgentTracerMixin.instrument_network_calls(self)
             CustomTracerMixin.instrument_network_calls(self)
 
-
-
-
+        if self.auto_instrument_file_io:
+            ToolTracerMixin.instrument_file_io_calls(self)
+            LLMTracerMixin.instrument_file_io_calls(self)
+            AgentTracerMixin.instrument_file_io_calls(self)
+            CustomTracerMixin.instrument_file_io_calls(self)
+            builtins.open = self.user_interaction_tracer.traced_open
 
     def stop(self):
         """Stop tracing and save results"""
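The builtins overrides are now installed only when the matching instrumentation is enabled: print/input under user_interaction, open under file_io. A minimal sketch of the underlying mechanism; `traced_open` below is illustrative and is not the package's implementation:

```python
import builtins

_original_open = builtins.open

def traced_open(file, mode="r", *args, **kwargs):
    # Classify the access the same way the interaction filters above do.
    kind = "file_write" if any(m in mode for m in ("w", "a", "+")) else "file_read"
    print(f"{kind}: {file}")
    return _original_open(file, mode, *args, **kwargs)

builtins.open = traced_open          # roughly what enabling file_io turns on
with open("example.txt", "w") as f:  # logged as "file_write: example.txt"
    f.write("hello")
builtins.open = _original_open       # restored again when tracing stops
```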
ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py

@@ -32,6 +32,7 @@ class ToolTracerMixin:
         # add auto_instrument option
         self.auto_instrument_tool = False
         self.auto_instrument_user_interaction = False
+        self.auto_instrument_file_io = False
         self.auto_instrument_network = False
 
         # take care of auto_instrument
@@ -40,6 +41,9 @@
 
     def instrument_user_interaction_calls(self):
         self.auto_instrument_user_interaction = True
+
+    def instrument_file_io_calls(self):
+        self.auto_instrument_file_io = True
 
     def instrument_network_calls(self):
         self.auto_instrument_network = True
@@ -133,6 +137,10 @@
         component_id = str(uuid.uuid4())
         hash_id = generate_unique_hash_simple(func)
 
+        # Set current tool name and store the token
+        name_token = self.current_tool_name.set(name)
+        id_token = self.current_tool_id.set(component_id)
+
         # Start tracking network calls for this component
         self.start_component(component_id)
 
@@ -191,6 +199,12 @@
             self.add_component(tool_component)
 
             raise
+        finally:
+            # Reset the tool name and id context
+            if name_token:
+                self.current_tool_name.reset(name_token)
+            if id_token:
+                self.current_tool_id.reset(id_token)
 
     async def _trace_tool_execution(
         self, func, name, tool_type, version, *args, **kwargs
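The set/reset-with-token pattern used in both the sync and async paths is the standard `contextvars` idiom for keeping the current tool correct across nested or concurrent executions. A self-contained sketch with illustrative names:

```python
from contextvars import ContextVar

current_tool_name: ContextVar = ContextVar("current_tool_name", default=None)

def run_tool(name, inner=None):
    token = current_tool_name.set(name)   # remember the token...
    try:
        if inner:
            inner()
        return current_tool_name.get()
    finally:
        current_tool_name.reset(token)    # ...so the outer value is restored

print(run_tool("outer", inner=lambda: run_tool("inner")))  # -> outer
```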
@@ -207,6 +221,10 @@
         component_id = str(uuid.uuid4())
         hash_id = generate_unique_hash_simple(func)
 
+        # Set current tool name and store the token
+        name_token = self.current_tool_name.set(name)
+        id_token = self.current_tool_id.set(component_id)
+
         self.start_component(component_id)
         try:
             # Execute the tool
@@ -256,6 +274,12 @@
             self.add_component(tool_component)
 
             raise
+        finally:
+            # Reset the tool name and id context
+            if name_token:
+                self.current_tool_name.reset(name_token)
+            if id_token:
+                self.current_tool_id.reset(id_token)
 
     def create_tool_component(self, **kwargs):
         """Create a tool component according to the data structure"""
@@ -264,9 +288,19 @@
         network_calls = self.component_network_calls.get(kwargs["component_id"], [])
         interactions = []
         if self.auto_instrument_user_interaction:
-
-
-
+            input_output_interactions = []
+            for interaction in self.component_user_interaction.get(kwargs["component_id"], []):
+                if interaction["interaction_type"] in ["input", "output"]:
+                    input_output_interactions.append(interaction)
+            if input_output_interactions!=[]:
+                interactions.extend(input_output_interactions)
+        if self.auto_instrument_file_io:
+            file_io_interactions = []
+            for interaction in self.component_user_interaction.get(kwargs["component_id"], []):
+                if interaction["interaction_type"] in ["file_read", "file_write"]:
+                    file_io_interactions.append(interaction)
+            if file_io_interactions!=[]:
+                interactions.extend(file_io_interactions)
 
         # Get tags, metrics
         name = kwargs["name"]
ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json

@@ -2063,7 +2063,9 @@
         "input_cost_per_token": 5.9e-07,
         "output_cost_per_token": 7.9e-07,
         "litellm_provider": "groq",
-        "mode": "chat"
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_response_schema": true
     },
     "groq/llama-3.3-70b-specdec": {
         "max_tokens": 8192,
@@ -4605,6 +4607,20 @@
         "litellm_provider": "replicate",
         "mode": "chat"
     },
+    "openrouter/deepseek/deepseek-r1": {
+        "max_tokens": 8192,
+        "max_input_tokens": 64000,
+        "max_output_tokens": 8192,
+        "input_cost_per_token": 5.5e-07,
+        "input_cost_per_token_cache_hit": 1.4e-07,
+        "output_cost_per_token": 2.19e-06,
+        "litellm_provider": "openrouter",
+        "mode": "chat",
+        "supports_function_calling": true,
+        "supports_assistant_prefill": true,
+        "supports_tool_choice": true,
+        "supports_prompt_caching": true
+    },
     "openrouter/deepseek/deepseek-chat": {
         "max_tokens": 8192,
         "max_input_tokens": 66000,
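These entries feed per-token cost accounting. A hedged sketch of how a caller might price a single call from the new fields; the file path is the repo-relative location from the file list above, and the token counts are made up:

```python
import json

with open("ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json") as f:
    costs = json.load(f)

entry = costs["openrouter/deepseek/deepseek-r1"]
prompt_tokens, completion_tokens = 1200, 350
usd = (prompt_tokens * entry["input_cost_per_token"]
       + completion_tokens * entry["output_cost_per_token"])
print(f"approximate cost: ${usd:.6f}")
```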
ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py (new file)

@@ -0,0 +1,215 @@
+import platform
+import psutil
+import sys
+import pkg_resources
+import logging
+from typing import Dict, List, Optional
+from ..data.data_structure import (
+    SystemInfo,
+    OSInfo,
+    EnvironmentInfo,
+    Resources,
+    CPUResource,
+    MemoryResource,
+    DiskResource,
+    NetworkResource,
+    ResourceInfo,
+    MemoryInfo,
+    DiskInfo,
+    NetworkInfo,
+)
+
+logger = logging.getLogger(__name__)
+
+class SystemMonitor:
+    def __init__(self, trace_id: str):
+        self.trace_id = trace_id
+
+    def get_system_info(self) -> SystemInfo:
+        # Initialize with None values
+        os_info = OSInfo(
+            name=None,
+            version=None,
+            platform=None,
+            kernel_version=None,
+        )
+        env_info = EnvironmentInfo(
+            name=None,
+            version=None,
+            packages=[],
+            env_path=None,
+            command_to_run=None,
+        )
+
+        try:
+            # Get OS info
+            os_info = OSInfo(
+                name=platform.system(),
+                version=platform.version(),
+                platform=platform.machine(),
+                kernel_version=platform.release(),
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get OS info: {str(e)}")
+
+        try:
+            # Get Python environment info
+            installed_packages = [
+                f"{pkg.key}=={pkg.version}" for pkg in pkg_resources.working_set
+            ]
+            env_info = EnvironmentInfo(
+                name="Python",
+                version=platform.python_version(),
+                packages=installed_packages,
+                env_path=sys.prefix,
+                command_to_run=f"python {sys.argv[0]}",
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get environment info: {str(e)}")
+
+
+        # Always return a valid SystemInfo object
+        return SystemInfo(
+            id=f"sys_{self.trace_id}",
+            os=os_info,
+            environment=env_info,
+            source_code="",
+        )
+
+    def get_resources(self) -> Resources:
+        # Initialize with None values
+        cpu_info = ResourceInfo(
+            name=None,
+            cores=None,
+            threads=None,
+        )
+        cpu = CPUResource(info=cpu_info, interval="5s", values=[])
+
+        mem_info = MemoryInfo(
+            total=None,
+            free=None,
+        )
+        mem = MemoryResource(info=mem_info, interval="5s", values=[])
+
+        disk_info = DiskInfo(
+            total=None,
+            free=None,
+        )
+        disk_resource = DiskResource(
+            info=disk_info,
+            interval="5s",
+            read=[],
+            write=[],
+        )
+
+        net_info = NetworkInfo(
+            upload_speed=None,
+            download_speed=None,
+        )
+        net = NetworkResource(
+            info=net_info,
+            interval="5s",
+            uploads=[],
+            downloads=[],
+        )
+
+        try:
+            # CPU info
+            cpu_info = ResourceInfo(
+                name=platform.processor(),
+                cores=psutil.cpu_count(logical=False),
+                threads=psutil.cpu_count(logical=True),
+            )
+            cpu = CPUResource(info=cpu_info, interval="5s", values=[psutil.cpu_percent()])
+        except Exception as e:
+            logger.warning(f"Failed to get CPU info: {str(e)}")
+
+
+        try:
+            # Memory info
+            memory = psutil.virtual_memory()
+            mem_info = MemoryInfo(
+                total=memory.total / (1024**3),  # Convert to GB
+                free=memory.available / (1024**3),
+            )
+            mem = MemoryResource(info=mem_info, interval="5s", values=[memory.percent])
+        except Exception as e:
+            logger.warning(f"Failed to get memory info: {str(e)}")
+
+
+        try:
+            # Disk info
+            disk = psutil.disk_usage("/")
+            disk_info = DiskInfo(total=disk.total / (1024**3), free=disk.free / (1024**3))
+            disk_io = psutil.disk_io_counters()
+            disk_resource = DiskResource(
+                info=disk_info,
+                interval="5s",
+                read=[disk_io.read_bytes / (1024**2)],  # MB
+                write=[disk_io.write_bytes / (1024**2)],
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get disk info: {str(e)}")
+
+        try:
+            # Network info
+            net_io = psutil.net_io_counters()
+            net_info = NetworkInfo(
+                upload_speed=net_io.bytes_sent / (1024**2),  # MB
+                download_speed=net_io.bytes_recv / (1024**2),
+            )
+            net = NetworkResource(
+                info=net_info,
+                interval="5s",
+                uploads=[net_io.bytes_sent / (1024**2)],
+                downloads=[net_io.bytes_recv / (1024**2)],
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get network info: {str(e)}")
+
+
+        # Always return a valid Resources object
+        return Resources(cpu=cpu, memory=mem, disk=disk_resource, network=net)
+
+    def track_memory_usage(self) -> Optional[float]:
+        """Track memory usage in MB"""
+        try:
+            memory_usage = psutil.Process().memory_info().rss
+            return memory_usage / (1024 * 1024)  # Convert to MB
+        except Exception as e:
+            logger.warning(f"Failed to track memory usage: {str(e)}")
+            return None
+
+    def track_cpu_usage(self, interval: float) -> Optional[float]:
+        """Track CPU usage percentage"""
+        try:
+            return psutil.cpu_percent(interval=interval)
+        except Exception as e:
+            logger.warning(f"Failed to track CPU usage: {str(e)}")
+            return None
+
+    def track_disk_usage(self) -> Dict[str, Optional[float]]:
+        """Track disk I/O in MB"""
+        default_response = {'disk_read': None, 'disk_write': None}
+        try:
+            disk_io = psutil.disk_io_counters()
+            return {
+                'disk_read': disk_io.read_bytes / (1024 * 1024),  # Convert to MB
+                'disk_write': disk_io.write_bytes / (1024 * 1024)  # Convert to MB
+            }
+        except Exception as e:
+            logger.warning(f"Failed to track disk usage: {str(e)}")
+            return default_response
+
+    def track_network_usage(self) -> Dict[str, Optional[float]]:
+        """Track network I/O in MB"""
+        default_response = {'uploads': None, 'downloads': None}
+        try:
+            net_io = psutil.net_io_counters()
+            return {
+                'uploads': net_io.bytes_sent / (1024 * 1024),  # Convert to MB
+                'downloads': net_io.bytes_recv / (1024 * 1024)  # Convert to MB
+            }
+        except Exception as e:
+            logger.warning(f"Failed to track network usage: {str(e)}")
+            return default_response
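A hedged usage sketch for the new SystemMonitor helper; the import path comes from the file list above and the trace id is a placeholder:

```python
from ragaai_catalyst.tracers.agentic_tracing.utils.system_monitor import SystemMonitor

monitor = SystemMonitor(trace_id="trace-123")
print(monitor.get_system_info())      # OS + Python environment snapshot
print(monitor.get_resources())        # CPU/memory/disk/network snapshot
print(monitor.track_memory_usage())   # process RSS in MB, or None on failure
print(monitor.track_cpu_usage(0.1))   # CPU percent sampled over 0.1 s
```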
ragaai_catalyst/tracers/distributed.py

@@ -191,6 +191,7 @@ def trace_tool(name: str = None, tool_type: str = "generic", version: str = "1.0
     """Decorator for tracing tool functions."""
     def decorator(func):
         is_async = asyncio.iscoroutinefunction(func)
+        span_name = name or func.__name__
 
         @wraps(func)
         async def async_wrapper(*args, **kwargs):
@@ -198,15 +199,23 @@ def trace_tool(name: str = None, tool_type: str = "generic", version: str = "1.0
             if not tracer:
                 return await func(*args, **kwargs)
 
-            #
-
-
-
-
-
-
-
-
+            # Set current tool name and store the token
+            name_token = tracer.current_tool_name.set(span_name)
+
+            try:
+                # Use async tool tracing
+                return await tracer._trace_tool_execution(
+                    func,
+                    span_name,
+                    tool_type,
+                    version,
+                    *args,
+                    **kwargs
+                )
+            finally:
+                # Reset using the stored token
+                if name_token:
+                    tracer.current_tool_name.reset(name_token)
 
         @wraps(func)
         def sync_wrapper(*args, **kwargs):
@@ -214,15 +223,23 @@ def trace_tool(name: str = None, tool_type: str = "generic", version: str = "1.0
             if not tracer:
                 return func(*args, **kwargs)
 
-            #
-
-
-
-
-
-
-
-
+            # Set current tool name and store the token
+            name_token = tracer.current_tool_name.set(span_name)
+
+            try:
+                # Use synchronous tool tracing
+                return tracer._trace_sync_tool_execution(
+                    func,
+                    span_name,
+                    tool_type,
+                    version,
+                    *args,
+                    **kwargs
+                )
+            finally:
+                # Reset using the stored token
+                if name_token:
+                    tracer.current_tool_name.reset(name_token)
 
         return async_wrapper if is_async else sync_wrapper
     return decorator
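With `span_name` resolved once per decorated function, usage of the decorator is unchanged; a minimal sketch, assuming this import path and an active tracer (the tool name is illustrative):

```python
from ragaai_catalyst.tracers.distributed import trace_tool

@trace_tool(name="lookup_weather", tool_type="api")
def lookup_weather(city: str) -> str:
    return f"Sunny in {city}"

lookup_weather("Berlin")  # traced when a tracer is active; a plain call otherwise
```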
@@ -278,7 +295,17 @@ def current_span():
    if not tracer:
        return None
 
-    #
+    # First check for LLM context
+    llm_name = tracer.current_llm_call_name.get()
+    if llm_name:
+        return tracer.span(llm_name)
+
+    # Then check for tool context
+    tool_name = tracer.current_tool_name.get()
+    if tool_name:
+        return tracer.span(tool_name)
+
+    # Finally fall back to agent context
    agent_name = tracer.current_agent_name.get()
    if not agent_name:
        raise ValueError("No active span found. Make sure you're calling this within a traced function.")
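`current_span()` now resolves the active LLM span first, then the active tool span, and only then falls back to the agent context. A sketch of what that means inside a traced tool, with the same assumed imports as above:

```python
from ragaai_catalyst.tracers.distributed import trace_tool, current_span

@trace_tool(name="fetch_page")
def fetch_page(url: str):
    span = current_span()  # resolves to the "fetch_page" tool span, even when the
    return span            # tool is running inside an agent span
```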
ragaai_catalyst/tracers/tracer.py

@@ -98,10 +98,10 @@ class Tracer(AgenticTracing):
                 "custom": False
             }
         elif isinstance(auto_instrumentation, dict):
-            auto_instrumentation = {k: v for k, v in auto_instrumentation.items()
+            auto_instrumentation = {k: v for k, v in auto_instrumentation.items()}
             for key in ["llm", "tool", "agent", "user_interaction", "file_io", "network", "custom"]:
                 if key not in auto_instrumentation:
-                    auto_instrumentation[key] =
+                    auto_instrumentation[key] = True
 
         super().__init__(user_detail=user_detail, auto_instrumentation=auto_instrumentation)
 
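Net effect of this change together with the main_tracer defaults above: when a dict is passed, every omitted key is now filled in as True instead of being left unset. A hedged construction sketch; the constructor arguments other than `auto_instrumentation` are placeholders:

```python
from ragaai_catalyst import Tracer  # top-level export assumed

tracer = Tracer(
    project_name="my_project",
    dataset_name="my_dataset",
    # "tool", "agent", "user_interaction", "network", "custom" default to True
    auto_instrumentation={"llm": True, "file_io": False},
)
```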
{ragaai_catalyst-2.1.5b0.dist-info → ragaai_catalyst-2.1.5b2.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ragaai_catalyst
-Version: 2.1.5b0
+Version: 2.1.5b2
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>
 Requires-Python: <3.13,>=3.9