ragaai-catalyst 2.2.4b5__py3-none-any.whl → 2.2.5b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/__init__.py +0 -2
- ragaai_catalyst/dataset.py +59 -1
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +5 -285
- ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -2
- ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +1 -1
- ragaai_catalyst/tracers/exporters/__init__.py +1 -2
- ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -1
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +23 -1
- ragaai_catalyst/tracers/tracer.py +6 -186
- {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/METADATA +1 -1
- {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/RECORD +14 -45
- ragaai_catalyst/experiment.py +0 -486
- ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -536
- ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -134
- ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -563
- ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
- ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -197
- ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -172
- ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -687
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +0 -1319
- ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -347
- ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
- ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -1182
- ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -288
- ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -557
- ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -129
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -74
- ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -21
- ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -32
- ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -28
- ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -133
- ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -34
- ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -467
- ragaai_catalyst/tracers/langchain_callback.py +0 -821
- ragaai_catalyst/tracers/llamaindex_callback.py +0 -361
- ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -424
- ragaai_catalyst/tracers/upload_traces.py +0 -170
- ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -62
- ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -69
- ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -74
- ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -82
- ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +0 -403
- {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/licenses/LICENSE +0 -0
- {ragaai_catalyst-2.2.4b5.dist-info → ragaai_catalyst-2.2.5b2.dist-info}/top_level.txt +0 -0
ragaai_catalyst/__init__.py
CHANGED
@@ -1,4 +1,3 @@
-from .experiment import Experiment
 from .ragaai_catalyst import RagaAICatalyst
 from .utils import response_checker
 from .dataset import Dataset
@@ -15,7 +14,6 @@ from .redteaming import RedTeaming


 __all__ = [
-    "Experiment",
     "RagaAICatalyst",
     "Tracer",
     "PromptManager",
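With `experiment.py` deleted and `Experiment` dropped from `__all__`, the public import surface loses one name in 2.2.5b2. A quick sketch of the effect on user code (the `try/except` is only for illustration):

```python
# Imports that remain valid in 2.2.5b2 (names still exported by __init__.py).
from ragaai_catalyst import RagaAICatalyst, Dataset, Tracer, PromptManager

# This worked in 2.2.4b5 but now raises ImportError, since experiment.py
# was removed and "Experiment" is no longer re-exported.
try:
    from ragaai_catalyst import Experiment  # noqa: F401
except ImportError as err:
    print(f"Experiment is no longer part of the public API: {err}")
```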
ragaai_catalyst/dataset.py
CHANGED
@@ -732,4 +732,62 @@ class Dataset:
         try:
             os.remove(tmp_csv_path)
         except Exception as e:
-            logger.error(f"Error removing temporary CSV file: {e}")
+            logger.error(f"Error removing temporary CSV file: {e}")
+
+    def delete_dataset(self, dataset_name):
+        try:
+            def make_request():
+                headers = {
+                    'Content-Type': 'application/json',
+                    "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
+                    "X-Project-Id": str(self.project_id),
+                }
+                json_data = {"size": 99999, "page": "0", "projectId": str(self.project_id), "search": ""}
+                try:
+                    response = requests.post(
+                        f"{Dataset.BASE_URL}/v2/llm/dataset",
+                        headers=headers,
+                        json=json_data,
+                        timeout=Dataset.TIMEOUT,
+                    )
+                    response.raise_for_status()
+                    return response
+                except requests.exceptions.RequestException as e:
+                    logger.error(f"Failed to list datasets: {e}")
+                    pass
+
+            response = make_request()
+
+            datasets = response.json()["data"]["content"]
+            dataset_list = [dataset["name"] for dataset in datasets]
+            if dataset_name not in dataset_list:
+                logger.error(f"Dataset '{dataset_name}' does not exists. Please enter a existing dataset name")
+                return
+
+            # Get dataset id
+            dataset_id = [dataset["id"] for dataset in datasets if dataset["name"] == dataset_name][0]
+
+            response = requests.delete(
+                f"{Dataset.BASE_URL}/v1/llm/dataset/{int(dataset_id)}",
+                headers={
+                    'Authorization': f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
+                    "X-Project-Id": str(self.project_id)
+                },
+                timeout=Dataset.TIMEOUT
+            )
+            response.raise_for_status()
+            if response.json()["success"]:
+                print(f"Dataset '{dataset_name}' deleted successfully")
+            else:
+                logger.error("Request was not successful")
+        except requests.exceptions.HTTPError as http_err:
+            logger.error(f"HTTP error occurred: {http_err}")
+        except requests.exceptions.ConnectionError as conn_err:
+            logger.error(f"Connection error occurred: {conn_err}")
+        except requests.exceptions.Timeout as timeout_err:
+            logger.error(f"Timeout error occurred: {timeout_err}")
+        except requests.exceptions.RequestException as req_err:
+            logger.error(f"An error occurred: {req_err}")
+        except Exception as e:
+            logger.error(f"An unexpected error occurred: {e}")
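The new `Dataset.delete_dataset(dataset_name)` first lists the project's datasets via `POST {BASE_URL}/v2/llm/dataset`, resolves the id for the given name, then issues `DELETE {BASE_URL}/v1/llm/dataset/{id}`; an unknown name only logs an error. A minimal usage sketch, assuming the package's usual `RagaAICatalyst(access_key=..., secret_key=...)` authentication and `Dataset(project_name=...)` constructor, neither of which is shown in this diff:

```python
import os
from ragaai_catalyst import RagaAICatalyst, Dataset

# Assumed authentication step (expected to populate RAGAAI_CATALYST_TOKEN,
# which delete_dataset reads from the environment).
RagaAICatalyst(
    access_key=os.environ["RAGAAI_ACCESS_KEY"],  # illustrative env var names
    secret_key=os.environ["RAGAAI_SECRET_KEY"],
)

dataset = Dataset(project_name="my-project")  # hypothetical project name

# Logs an error and returns if "obsolete_dataset" does not exist;
# prints a success message once the DELETE response reports success.
dataset.delete_dataset("obsolete_dataset")
```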
ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py
CHANGED
@@ -7,15 +7,7 @@ import os
 import builtins
 from pathlib import Path
 import logging
-
-from .base import BaseTracer
-from .llm_tracer import LLMTracerMixin
-from .tool_tracer import ToolTracerMixin
-from .agent_tracer import AgentTracerMixin
-from .network_tracer import NetworkTracer
-from .user_interaction_tracer import UserInteractionTracer
-from .custom_tracer import CustomTracerMixin
-from ..utils.span_attributes import SpanAttributes
+logger = logging.getLogger(__name__)

 from ..data.data_structure import (
     Trace,
@@ -42,20 +34,10 @@ from ..data.data_structure import (
 )

 from ....ragaai_catalyst import RagaAICatalyst
-from ragaai_catalyst.tracers.upload_traces import UploadTraces


-class AgenticTracing(
-    BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin, CustomTracerMixin
-):
+class AgenticTracing():
     def __init__(self, user_detail, auto_instrumentation=None, timeout=120):
-        # Initialize all parent classes
-        self.user_interaction_tracer = UserInteractionTracer()
-        LLMTracerMixin.__init__(self)
-        ToolTracerMixin.__init__(self)
-        AgentTracerMixin.__init__(self)
-        CustomTracerMixin.__init__(self)
-
         self.project_name = user_detail["project_name"]
         self.project_id = user_detail["project_id"]
         self.trace_user_detail = user_detail["trace_user_detail"]
@@ -65,14 +47,11 @@ class AgenticTracing(
         # Add warning flag
         self._warning_shown = False

-        BaseTracer.__init__(self, user_detail)
-
         self.tools: Dict[str, Tool] = {}
         self.call_depth = contextvars.ContextVar("call_depth", default=0)
         self.current_component_id = contextvars.ContextVar(
             "current_component_id", default=None
         )
-        self.network_tracer = NetworkTracer()

         # Handle auto_instrumentation
         if auto_instrumentation is None:
@@ -120,272 +99,13 @@ class AgenticTracing(
         self.component_user_interaction = {}


-    def start_component(self, component_id: str):
-        """Start tracking network calls for a component"""
-        self.component_network_calls[component_id] = []
-        self.network_tracer.network_calls = []  # Reset network calls
-        self.current_component_id.set(component_id)
-        self.user_interaction_tracer.component_id.set(component_id)
-
-    def end_component(self, component_id: str):
-        """End tracking network calls for a component"""
-        self.component_network_calls[component_id] = (
-            self.network_tracer.network_calls.copy()
-        )
-        self.network_tracer.network_calls = []  # Reset for next component
-
-        # Store user interactions for the component
-        for interaction in self.user_interaction_tracer.interactions:
-            interaction_component_id = interaction.get("component_id")
-            if interaction_component_id not in self.component_user_interaction:
-                self.component_user_interaction[interaction_component_id] = []
-            if interaction not in self.component_user_interaction[interaction_component_id]:
-                self.component_user_interaction[interaction_component_id].append(interaction)
-
-        # Only reset component_id if it matches the current one
-        # This ensures we don't reset a parent's component_id when a child component ends
-        if self.current_component_id.get() == component_id:
-            # Get the parent agent's component_id if it exists
-            parent_agent_id = self.current_agent_id.get()
-            # If there's a parent agent, set the component_id back to the parent's
-            if parent_agent_id:
-                self.current_component_id.set(parent_agent_id)
-                self.user_interaction_tracer.component_id.set(parent_agent_id)
-            else:
-                # Only reset to None if there's no parent
-                self.current_component_id.set(None)
-                self.user_interaction_tracer.component_id.set(None)
-
     def register_post_processor(self, post_processor_func):
         """
         Pass through the post-processor registration to the BaseTracer
         """
-
-
-
-        """Start tracing"""
-        self.is_active = True
-
-        # Setup user interaction tracing
-        self.user_interaction_tracer.project_id.set(self.project_id)
-        self.user_interaction_tracer.trace_id.set(self.trace_id)
-        self.user_interaction_tracer.tracer = self
-        self.user_interaction_tracer.component_id.set(self.current_component_id.get())
-
-        # Start base tracer (includes system info and resource monitoring)
-        super().start()
-
-        # Activate network tracing
-        self.network_tracer.activate_patches()
-
-        # take care of the auto instrumentation
-        if self.auto_instrument_user_interaction:
-            ToolTracerMixin.instrument_user_interaction_calls(self)
-            LLMTracerMixin.instrument_user_interaction_calls(self)
-            AgentTracerMixin.instrument_user_interaction_calls(self)
-            CustomTracerMixin.instrument_user_interaction_calls(self)
-            builtins.print = self.user_interaction_tracer.traced_print
-            builtins.input = self.user_interaction_tracer.traced_input
-
-        if self.auto_instrument_network:
-            ToolTracerMixin.instrument_network_calls(self)
-            LLMTracerMixin.instrument_network_calls(self)
-            AgentTracerMixin.instrument_network_calls(self)
-            CustomTracerMixin.instrument_network_calls(self)
-
-        if self.auto_instrument_file_io:
-            ToolTracerMixin.instrument_file_io_calls(self)
-            LLMTracerMixin.instrument_file_io_calls(self)
-            AgentTracerMixin.instrument_file_io_calls(self)
-            CustomTracerMixin.instrument_file_io_calls(self)
-            builtins.open = self.user_interaction_tracer.traced_open
-
-        if self.auto_instrument_llm:
-            self.instrument_llm_calls()
-
-        if self.auto_instrument_tool:
-            self.instrument_tool_calls()
-
-        if self.auto_instrument_agent:
-            self.instrument_agent_calls()
-
-        if self.auto_instrument_custom:
-            self.instrument_custom_calls()
-
-    def stop(self):
-        """Stop tracing and save results"""
-        if self.is_active:
-            # Restore original print and input functions
-            builtins.print = self.user_interaction_tracer.original_print
-            builtins.input = self.user_interaction_tracer.original_input
-            builtins.open = self.user_interaction_tracer.original_open
-
-            # Calculate final metrics before stopping
-            self._calculate_final_metrics()
-
-            # Deactivate network tracing
-            self.network_tracer.deactivate_patches()
-
-            # Clear visited metrics when stopping trace
-            self.visited_metrics.clear()
-
-            # Stop base tracer (includes saving to file)
-            super().stop()
-
-            # Cleanup
-            self.unpatch_llm_calls()
-            self.user_interaction_tracer.interactions = []  # Clear interactions list
-            self.is_active = False
-
-    def _calculate_final_metrics(self):
-        """Calculate total cost and tokens from all components"""
-        total_cost = 0.0
-        total_tokens = 0
-
-        processed_components = set()
-
-        def process_component(component):
-            nonlocal total_cost, total_tokens
-            # Convert component to dict if it's an object
-            comp_dict = (
-                component.__dict__ if hasattr(component, "__dict__") else component
-            )
-
-            comp_id = comp_dict.get("id") or comp_dict.get("component_id")
-            if comp_id in processed_components:
-                return  # Skip if already processed
-            processed_components.add(comp_id)
-
-            if comp_dict.get("type") == "llm":
-                info = comp_dict.get("info", {})
-                if isinstance(info, dict):
-                    # Extract cost
-                    cost_info = info.get("cost", {})
-                    if isinstance(cost_info, dict):
-                        total_cost += cost_info.get("total_cost", 0)
-
-                    # Extract tokens
-                    token_info = info.get("tokens", {})
-                    if isinstance(token_info, dict):
-                        total_tokens += token_info.get("total_tokens", 0)
-                    else:
-                        token_info = info.get("token_usage", {})
-                        if isinstance(token_info, dict):
-                            total_tokens += token_info.get("total_tokens", 0)
-
-            # Process children if they exist
-            data = comp_dict.get("data", {})
-            if isinstance(data, dict):
-                children = data.get("children", [])
-                if children:
-                    for child in children:
-                        process_component(child)
-
-        # Process all root components
-        for component in self.components:
-            process_component(component)
-
-        # Update metadata in trace
-        if hasattr(self, "trace"):
-            if isinstance(self.trace.metadata, dict):
-                self.trace.metadata["total_cost"] = total_cost
-                self.trace.metadata["total_tokens"] = total_tokens
-            else:
-                self.trace.metadata.total_cost = total_cost
-                self.trace.metadata.total_tokens = total_tokens
-
-    def add_component(self, component_data: dict, is_error: bool = False):
-        """Add a component to the trace data"""
-        # Convert dict to appropriate Component type
-        filtered_data = {
-            k: v
-            for k, v in component_data.items()
-            if k
-            in [
-                "id",
-                "hash_id",
-                "source_hash_id",
-                "type",
-                "name",
-                "start_time",
-                "end_time",
-                "parent_id",
-                "info",
-                "extra_info",
-                "data",
-                "metadata",
-                "metrics",
-                "feedback",
-                "network_calls",
-                "interactions",
-                "error",
-            ]
-        }
-
-        if component_data == None or component_data == {} or component_data.get("type", None) == None:
-            # Only show warning if it hasn't been shown before
-            if not self._warning_shown:
-                import toml
-                import os
-                from pathlib import Path
-
-                # Load supported LLM calls from TOML file
-                current_dir = Path(__file__).parent
-                toml_path = current_dir / "../utils/supported_llm_provider.toml"
-                try:
-                    with open(toml_path, "r") as f:
-                        config = toml.load(f)
-                        supported_calls = ", ".join(config["supported_llm_calls"])
-                except Exception as e:
-                    supported_calls = "Error loading supported LLM calls"
-
-                # ANSI escape codes for colors and formatting
-                RED = "\033[91m"
-                BOLD = "\033[1m"
-                RESET = "\033[0m"
-                BIG = "\033[1;2m"  # Makes text slightly larger in supported terminals
-
-                warning_msg = f"""{RED}{BOLD}{BIG}
-╔════════════════════════ COMPONENT DATA INCOMPLETE ════════════════════════╗
-║ ║
-║ Please ensure these requirements: ║
-║ ✗ trace_llm decorator must have a stand alone llm call ║
-║ ✗ trace_tool decorator must be a stand alone tool/function call ║
-║ ✗ trace_agent decorator can have multiple/nested llm/tool/agent calls ║
-║ ║
-║ Supported LLM calls: ║
-║ {supported_calls} ║
-║ ║
-╚══════════════════════════════════════════════════════════════════════════╝
-{RESET}"""
-                # Use logger.warning for the message
-                logging.warning(warning_msg)
-                self._warning_shown = True
-            return
-
-        if component_data["type"] == "llm":
-            component = LLMComponent(**filtered_data)
-        elif component_data["type"] == "agent":
-            component = AgentComponent(**filtered_data)
-        elif component_data["type"] == "tool":
-            component = ToolComponent(**filtered_data)
-        else:
-            component = Component(**component_data)
-
-        # Check if there's an active agent context
-        current_agent_id = self.current_agent_id.get()
-        if current_agent_id and component_data["type"] in ["llm", "tool", "custom"]:
-            # Add this component as a child of the current agent
-            current_children = self.agent_children.get()
-            current_children.append(component_data)
-            self.agent_children.set(current_children)
-        else:
-            # Add component to the main trace
-            super().add_component(component)
-
-        # Handle error case
-        if is_error and not self.current_agent_id.get():
-            self.stop()
+        if not callable(post_processor_func):
+            logger.error("post_processor_func must be a callable")
+        self.post_processor = post_processor_func

     def __enter__(self):
         """Context manager entry"""
ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py
CHANGED
@@ -2,7 +2,7 @@ import os
 import json
 import re
 import requests
-from ragaai_catalyst
+from ragaai_catalyst import RagaAICatalyst

 def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, user_details=None, timeout=120):
     SCHEMA_MAPPING = {}
ragaai_catalyst/tracers/exporters/__init__.py
CHANGED
@@ -1,7 +1,6 @@
 from .file_span_exporter import FileSpanExporter
-from .raga_exporter import RagaExporter
 from .ragaai_trace_exporter import RAGATraceExporter
 from .dynamic_trace_exporter import DynamicTraceExporter


-__all__ = ["FileSpanExporter", "
+__all__ = ["FileSpanExporter", "RAGATraceExporter", "DynamicTraceExporter"]
ragaai_catalyst/tracers/exporters/file_span_exporter.py
CHANGED
@@ -9,7 +9,6 @@ import asyncio
 from concurrent.futures import ThreadPoolExecutor
 from opentelemetry.sdk.trace.export import SpanExporter
 from ..utils import get_unique_key
-from .raga_exporter import RagaExporter

 # Set up logging
 logging.basicConfig(level=logging.INFO)
ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py
CHANGED
@@ -6,7 +6,6 @@ from dataclasses import asdict

 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult

-from ragaai_catalyst.tracers.agentic_tracing.tracers.base import TracerJSONEncoder
 from ragaai_catalyst.tracers.agentic_tracing.upload.trace_uploader import (
     submit_upload_task,
 )
@@ -24,6 +23,29 @@ logging_level = (
     logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
 )

+class TracerJSONEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, datetime):
+            return obj.isoformat()
+        if isinstance(obj, bytes):
+            try:
+                return obj.decode("utf-8")
+            except UnicodeDecodeError:
+                return str(obj)  # Fallback to string representation
+        if hasattr(obj, "to_dict"):  # Handle objects with to_dict method
+            return obj.to_dict()
+        if hasattr(obj, "__dict__"):
+            # Filter out None values and handle nested serialization
+            return {
+                k: v
+                for k, v in obj.__dict__.items()
+                if v is not None and not k.startswith("_")
+            }
+        try:
+            # Try to convert to a basic type
+            return str(obj)
+        except:
+            return None  # Last resort: return None instead of failing

 class RAGATraceExporter(SpanExporter):
     def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
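The `TracerJSONEncoder` that previously lived in the deleted `agentic_tracing/tracers/base.py` is now defined directly in `ragaai_trace_exporter.py`. A small sketch of what the encoder does with objects the stock `json` module cannot serialize; the import path follows the new module location, and the sample class is purely illustrative:

```python
import json
from datetime import datetime
from ragaai_catalyst.tracers.exporters.ragaai_trace_exporter import TracerJSONEncoder

class SpanInfo:
    def __init__(self):
        self.name = "llm-call"
        self.started_at = datetime(2024, 1, 1, 12, 0, 0)
        self.raw = b"prompt bytes"
        self._private = "dropped"  # underscore-prefixed attributes are filtered out
        self.missing = None        # None values are filtered out

# datetimes become ISO strings, bytes are utf-8 decoded, plain objects fall back
# to their filtered __dict__, and anything else degrades to str(obj).
print(json.dumps(SpanInfo(), cls=TracerJSONEncoder, indent=2))
```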
|