ragaai-catalyst 2.2.4.2b1__py3-none-any.whl → 2.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/__init__.py +0 -2
- ragaai_catalyst/dataset.py +59 -1
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +5 -285
- ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +3 -1
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +5 -6
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +13 -12
- ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py +0 -2
- ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +20 -9
- ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py +9 -1
- ragaai_catalyst/tracers/exporters/__init__.py +1 -2
- ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +48 -35
- ragaai_catalyst/tracers/exporters/file_span_exporter.py +0 -1
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +81 -28
- ragaai_catalyst/tracers/tracer.py +14 -191
- ragaai_catalyst/tracers/utils/trace_json_converter.py +7 -1
- {ragaai_catalyst-2.2.4.2b1.dist-info → ragaai_catalyst-2.2.5.dist-info}/METADATA +2 -1
- {ragaai_catalyst-2.2.4.2b1.dist-info → ragaai_catalyst-2.2.5.dist-info}/RECORD +20 -51
- ragaai_catalyst/experiment.py +0 -486
- ragaai_catalyst/tracers/agentic_tracing/tests/FinancialAnalysisSystem.ipynb +0 -536
- ragaai_catalyst/tracers/agentic_tracing/tests/GameActivityEventPlanner.ipynb +0 -134
- ragaai_catalyst/tracers/agentic_tracing/tests/TravelPlanner.ipynb +0 -563
- ragaai_catalyst/tracers/agentic_tracing/tests/__init__.py +0 -0
- ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py +0 -197
- ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py +0 -172
- ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py +0 -687
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +0 -1319
- ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py +0 -347
- ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py +0 -0
- ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py +0 -1182
- ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py +0 -288
- ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py +0 -557
- ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py +0 -129
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py +0 -74
- ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py +0 -21
- ragaai_catalyst/tracers/agentic_tracing/utils/generic.py +0 -32
- ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py +0 -28
- ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py +0 -133
- ragaai_catalyst/tracers/agentic_tracing/utils/supported_llm_provider.toml +0 -34
- ragaai_catalyst/tracers/exporters/raga_exporter.py +0 -467
- ragaai_catalyst/tracers/langchain_callback.py +0 -821
- ragaai_catalyst/tracers/llamaindex_callback.py +0 -361
- ragaai_catalyst/tracers/llamaindex_instrumentation.py +0 -424
- ragaai_catalyst/tracers/upload_traces.py +0 -170
- ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py +0 -62
- ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py +0 -69
- ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py +0 -74
- ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py +0 -82
- ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +0 -403
- {ragaai_catalyst-2.2.4.2b1.dist-info → ragaai_catalyst-2.2.5.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.2.4.2b1.dist-info → ragaai_catalyst-2.2.5.dist-info}/licenses/LICENSE +0 -0
- {ragaai_catalyst-2.2.4.2b1.dist-info → ragaai_catalyst-2.2.5.dist-info}/top_level.txt +0 -0
ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py

@@ -4,17 +4,34 @@ Dynamic Trace Exporter - A wrapper for RAGATraceExporter that allows dynamic updates
 import logging
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 from ragaai_catalyst.tracers.exporters.ragaai_trace_exporter import RAGATraceExporter
+from typing import Optional, List, Dict, Callable
 
 logger = logging.getLogger("RagaAICatalyst")
 
+
 class DynamicTraceExporter(SpanExporter):
     """
     A wrapper around RAGATraceExporter that allows dynamic updates to properties.
     This exporter forwards all calls to the underlying RAGATraceExporter but allows
     certain properties to be updated dynamically during execution.
     """
-
-
+    def __init__(
+        self,
+        project_name: str,
+        dataset_name: str,
+        base_url: str,
+        tracer_type: str,
+        files_to_zip: Optional[List[str]] = None,
+        project_id: Optional[str] = None,
+        user_details: Optional[Dict] = None,
+        custom_model_cost: Optional[dict] = None,
+        timeout: int = 120,
+        post_processor: Optional[Callable] = None,
+        max_upload_workers: int = 30,
+        user_context: Optional[str] = None,
+        user_gt: Optional[str] = None,
+        external_id: Optional[str] = None
+    ):
         """
         Initialize the DynamicTraceExporter.
 
@@ -30,22 +47,22 @@ class DynamicTraceExporter(SpanExporter):
             max_upload_workers: Maximum number of upload workers
         """
         self._exporter = RAGATraceExporter(
+            project_name=project_name,
+            dataset_name=dataset_name,
+            base_url=base_url,
             tracer_type=tracer_type,
             files_to_zip=files_to_zip,
-            project_name=project_name,
             project_id=project_id,
-            dataset_name=dataset_name,
             user_details=user_details,
-            base_url=base_url,
             custom_model_cost=custom_model_cost,
             timeout=timeout,
-            post_processor=
-            max_upload_workers
-            user_context
-            user_gt
+            post_processor=post_processor,
+            max_upload_workers=max_upload_workers,
+            user_context=user_context,
+            user_gt=user_gt,
             external_id=external_id
         )
-
+
         # Store the initial values
         self._files_to_zip = files_to_zip
         self._project_name = project_name
@@ -60,7 +77,6 @@ class DynamicTraceExporter(SpanExporter):
         self._user_gt = user_gt
         self._external_id = external_id
 
-
     def export(self, spans):
         """
         Export spans by forwarding to the underlying exporter.
@@ -84,8 +100,6 @@ class DynamicTraceExporter(SpanExporter):
             return result
         except Exception as e:
             logger.error(f"Error exporting trace: {e}")
-
-
 
     def shutdown(self):
         """
@@ -103,7 +117,7 @@ class DynamicTraceExporter(SpanExporter):
             return self._exporter.shutdown()
         except Exception as e:
             logger.error(f"Error shutting down exporter: {e}")
-
+
     def _update_exporter_properties(self):
         """
         Update the underlying exporter's properties with the current values.
@@ -118,55 +132,55 @@ class DynamicTraceExporter(SpanExporter):
         self._exporter.post_processor = self._post_processor
         self._exporter.max_upload_workers = self._max_upload_workers
         self._exporter.user_context = self._user_context
-        self._exporter.user_gt = self._user_gt
+        self._exporter.user_gt = self._user_gt
         self._exporter.external_id = self._external_id
-
+
     # Getter and setter methods for dynamic properties
-
+
     @property
     def files_to_zip(self):
         return self._files_to_zip
-
+
     @files_to_zip.setter
     def files_to_zip(self, value):
         self._files_to_zip = value
-
+
     @property
     def project_name(self):
         return self._project_name
-
+
     @project_name.setter
     def project_name(self, value):
         self._project_name = value
-
+
     @property
     def project_id(self):
         return self._project_id
-
+
     @project_id.setter
     def project_id(self, value):
         self._project_id = value
-
+
     @property
     def dataset_name(self):
         return self._dataset_name
-
+
     @dataset_name.setter
     def dataset_name(self, value):
         self._dataset_name = value
-
+
     @property
     def user_details(self):
         return self._user_details
-
+
     @user_details.setter
     def user_details(self, value):
         self._user_details = value
-
+
     @property
     def base_url(self):
         return self._base_url
-
+
     @base_url.setter
     def base_url(self, value):
         self._base_url = value
@@ -174,15 +188,15 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def custom_model_cost(self):
         return self._custom_model_cost
-
+
     @custom_model_cost.setter
     def custom_model_cost(self, value):
         self._custom_model_cost = value
-
+
     @property
     def max_upload_workers(self):
         return self._max_upload_workers
-
+
     @max_upload_workers.setter
     def max_upload_workers(self, value):
         self._max_upload_workers = value
@@ -190,7 +204,7 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def user_context(self):
         return self._user_context
-
+
     @user_context.setter
     def user_context(self, value):
         self._user_context = value
@@ -198,7 +212,7 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def user_gt(self):
         return self._user_gt
-
+
     @user_gt.setter
     def user_gt(self, value):
         self._user_gt = value
@@ -206,8 +220,7 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def external_id(self):
         return self._external_id
-
+
     @external_id.setter
     def external_id(self, value):
         self._external_id = value
-
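For orientation, a minimal sketch of how a caller might construct the reworked exporter under the new keyword-argument signature. This is illustrative only: the import path follows the module shown above, the project, dataset, and endpoint values are placeholders, and the `tracer_type` string is one that appears elsewhere in this diff.

```python
# Hypothetical usage sketch of the 2.2.5 DynamicTraceExporter signature (placeholder values).
from ragaai_catalyst.tracers.exporters.dynamic_trace_exporter import DynamicTraceExporter

exporter = DynamicTraceExporter(
    project_name="my-project",                      # required settings now lead the signature
    dataset_name="my-dataset",
    base_url="https://catalyst.example.com/api",    # placeholder endpoint
    tracer_type="langchain",
    timeout=120,                                    # defaults from the diff: timeout=120
    max_upload_workers=30,                          # and max_upload_workers=30
)

# Properties remain dynamically updatable after construction, as the setters above show.
exporter.dataset_name = "my-other-dataset"
```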
ragaai_catalyst/tracers/exporters/file_span_exporter.py

@@ -9,7 +9,6 @@ import asyncio
 from concurrent.futures import ThreadPoolExecutor
 from opentelemetry.sdk.trace.export import SpanExporter
 from ..utils import get_unique_key
-from .raga_exporter import RagaExporter
 
 # Set up logging
 logging.basicConfig(level=logging.INFO)
ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py

@@ -3,10 +3,10 @@ import logging
 import os
 import tempfile
 from dataclasses import asdict
+from typing import Optional, Callable, Dict, List
 
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 
-from ragaai_catalyst.tracers.agentic_tracing.tracers.base import TracerJSONEncoder
 from ragaai_catalyst.tracers.agentic_tracing.upload.trace_uploader import (
     submit_upload_task,
 )
@@ -25,8 +25,49 @@ logging_level = (
 )
 
 
+class TracerJSONEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, datetime):
+            return obj.isoformat()
+        if isinstance(obj, bytes):
+            try:
+                return obj.decode("utf-8")
+            except UnicodeDecodeError:
+                return str(obj)  # Fallback to string representation
+        if hasattr(obj, "to_dict"):  # Handle objects with to_dict method
+            return obj.to_dict()
+        if hasattr(obj, "__dict__"):
+            # Filter out None values and handle nested serialization
+            return {
+                k: v
+                for k, v in obj.__dict__.items()
+                if v is not None and not k.startswith("_")
+            }
+        try:
+            # Try to convert to a basic type
+            return str(obj)
+        except:
+            return None  # Last resort: return None instead of failing
+
+
 class RAGATraceExporter(SpanExporter):
-    def __init__(
+    def __init__(
+            self,
+            project_name: str,
+            dataset_name: str,
+            base_url: str,
+            tracer_type: str,
+            files_to_zip: Optional[List[str]] = None,
+            project_id: Optional[str] = None,
+            user_details: Optional[Dict] = None,
+            custom_model_cost: Optional[dict] = None,
+            timeout: int = 120,
+            post_processor: Optional[Callable] = None,
+            max_upload_workers: int = 30,
+            user_context: Optional[str] = None,
+            user_gt: Optional[str] = None,
+            external_id: Optional[str] = None
+    ):
         self.trace_spans = dict()
         self.tmp_dir = tempfile.gettempdir()
         self.tracer_type = tracer_type
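A small sketch of how the relocated `TracerJSONEncoder` behaves, assuming it is importable from its new home in `ragaai_trace_exporter`; the payload below is made up purely to exercise the datetime and bytes branches shown above.

```python
# Illustrative only: exercises the TracerJSONEncoder defined above on a fabricated payload.
import json
from datetime import datetime

from ragaai_catalyst.tracers.exporters.ragaai_trace_exporter import TracerJSONEncoder

payload = {
    "recorded_on": datetime(2024, 1, 1, 12, 0, 0),  # datetime -> ISO-8601 string
    "raw": b"span bytes",                           # bytes -> UTF-8 decoded string
}
print(json.dumps(payload, cls=TracerJSONEncoder))
# {"recorded_on": "2024-01-01T12:00:00", "raw": "span bytes"}
```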
@@ -56,6 +97,9 @@ class RAGATraceExporter(SpanExporter):
             if trace_id not in self.trace_spans:
                 self.trace_spans[trace_id] = list()
 
+            if span_json.get("attributes").get("openinference.span.kind", None) is None:
+                span_json["attributes"]["openinference.span.kind"] = "UNKNOWN"
+
             self.trace_spans[trace_id].append(span_json)
 
             if span_json["parent_id"] is None:
@@ -93,10 +137,10 @@ class RAGATraceExporter(SpanExporter):
             if ragaai_trace_details is None:
                 logger.error(f"Cannot upload trace {trace_id}: conversion failed and returned None")
                 return  # Exit early if conversion failed
-
+
             # Upload the trace if upload_trace function is provided
             try:
-                if self.post_processor!=None:
+                if self.post_processor != None:
                     ragaai_trace_details['trace_file_path'] = self.post_processor(ragaai_trace_details['trace_file_path'])
                 self.upload_trace(ragaai_trace_details, trace_id)
             except Exception as e:
@@ -105,13 +149,14 @@ class RAGATraceExporter(SpanExporter):
     def prepare_trace(self, spans, trace_id):
         try:
             try:
-                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,
+                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,
+                                                   self.external_id)
             except Exception as e:
                 print(f"Error in convert_json_format function: {trace_id}: {e}")
                 return None
-
+
             try:
-                interactions = format_interactions(ragaai_trace)
+                interactions = format_interactions(ragaai_trace)
                 ragaai_trace["workflow"] = interactions['workflow']
             except Exception as e:
                 print(f"Error in format_interactions function: {trace_id}: {e}")
@@ -158,18 +203,26 @@ class RAGATraceExporter(SpanExporter):
             except Exception as e:
                 print(f"Error in adding tracer type: {trace_id}: {e}")
                 return None
-
-            #Add user passed metadata to the trace
+
+            # Add user passed metadata to the trace
             try:
-
-
-
-
-
+                logger.debug("Started adding user passed metadata")
+
+                metadata = (
+                    self.user_details.get("trace_user_detail", {}).get("metadata", {})
+                    if self.user_details else {}
+                )
+
+                if isinstance(metadata, dict):
+                    for key, value in metadata.items():
+                        if key not in {"log_source", "recorded_on"}:
+                            ragaai_trace.setdefault("metadata", {})[key] = value
+
+                logger.debug("Completed adding user passed metadata")
             except Exception as e:
                 print(f"Error in adding metadata: {trace_id}: {e}")
                 return None
-
+
             try:
                 # Save the trace_json
                 trace_file_path = os.path.join(self.tmp_dir, f"{trace_id}.json")
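The metadata block added above copies user-supplied keys into the trace while skipping the reserved `log_source` and `recorded_on` fields. A standalone sketch of that merge on made-up dictionaries rather than real trace objects:

```python
# Standalone sketch of the metadata merge added above (sample dicts, not real trace data).
user_details = {"trace_user_detail": {"metadata": {"env": "staging", "log_source": "sdk", "run": 7}}}
ragaai_trace = {"metadata": {"recorded_on": "2024-01-01T12:00:00"}}

metadata = (
    user_details.get("trace_user_detail", {}).get("metadata", {})
    if user_details else {}
)

if isinstance(metadata, dict):
    for key, value in metadata.items():
        if key not in {"log_source", "recorded_on"}:  # reserved keys are never overwritten
            ragaai_trace.setdefault("metadata", {})[key] = value

print(ragaai_trace["metadata"])
# {'recorded_on': '2024-01-01T12:00:00', 'env': 'staging', 'run': 7}
```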
@@ -195,16 +248,16 @@ class RAGATraceExporter(SpanExporter):
             hash_id = ragaai_trace_details['hash_id']
             zip_path = ragaai_trace_details['code_zip_path']
             self.upload_task_id = submit_upload_task(
-
-
-
-
-
-
-
-
-
-
-
-
-                logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
+                filepath=filepath,
+                hash_id=hash_id,
+                zip_path=zip_path,
+                project_name=self.project_name,
+                project_id=self.project_id,
+                dataset_name=self.dataset_name,
+                user_details=self.user_details,
+                base_url=self.base_url,
+                tracer_type=self.tracer_type,
+                timeout=self.timeout
+            )
+
+            logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
ragaai_catalyst/tracers/tracer.py

@@ -9,25 +9,16 @@ from litellm import model_cost
 from pathlib import Path
 from contextlib import contextmanager
 from concurrent.futures import ThreadPoolExecutor
-from ragaai_catalyst.tracers.langchain_callback import LangchainTracer
-from ragaai_catalyst.tracers.utils.convert_langchain_callbacks_output import convert_langchain_callbacks_output
-
-from ragaai_catalyst.tracers.utils.langchain_tracer_extraction_logic import langchain_tracer_extraction
-from ragaai_catalyst.tracers.upload_traces import UploadTraces
 import tempfile
 import json
 import numpy as np
 from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 from ragaai_catalyst.tracers.exporters.file_span_exporter import FileSpanExporter
-from ragaai_catalyst.tracers.exporters.raga_exporter import RagaExporter
 from ragaai_catalyst.tracers.utils import get_unique_key
-# from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
-from ragaai_catalyst.tracers.llamaindex_instrumentation import LlamaIndexInstrumentationTracer
 from openinference.instrumentation.langchain import LangChainInstrumentor
 from ragaai_catalyst import RagaAICatalyst
 from ragaai_catalyst.tracers.agentic_tracing import AgenticTracing
-from ragaai_catalyst.tracers.agentic_tracing.tracers.llm_tracer import LLMTracerMixin
 from ragaai_catalyst.tracers.exporters.ragaai_trace_exporter import RAGATraceExporter
 from ragaai_catalyst.tracers.agentic_tracing.utils.file_name_tracker import TrackName
 
@@ -178,21 +169,8 @@ class Tracer(AgenticTracing):
         except requests.exceptions.RequestException as e:
             logger.error(f"Failed to retrieve projects list: {e}")
 
-        # if tracer_type == "langchain":
-        #     instrumentors = []
-        #     from openinference.instrumentation.langchain import LangChainInstrumentor
-        #     instrumentors += [(LangChainInstrumentor, [])]
-        #     self._setup_agentic_tracer(instrumentors)
-        # elif tracer_type == "llamaindex":
-        #     self._upload_task = None
-        #     self.llamaindex_tracer = None
-        # elif tracer_type == "rag/langchain":
-        #     instrumentors = []
-        #     from openinference.instrumentation.langchain import LangChainInstrumentor
-        #     instrumentors += [(LangChainInstrumentor, [])]
-        #     self._setup_agentic_tracer(instrumentors)
         # Handle agentic tracers
-        if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain":
+        if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain" or tracer_type == "llamaindex" or tracer_type == "google-adk":
             # Setup instrumentors based on tracer type
             instrumentors = []
 
@@ -334,8 +312,11 @@ class Tracer(AgenticTracing):
 
         elif tracer_type == "agentic/openai_agents":
             from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
-            instrumentors += [(OpenAIAgentsInstrumentor, [])]
+            instrumentors += [(OpenAIAgentsInstrumentor, [])]
 
+        elif tracer_type == "google-adk":
+            from openinference.instrumentation.google_adk import GoogleADKInstrumentor
+            instrumentors += [(GoogleADKInstrumentor, [])]
         else:
             # Unknown agentic tracer type
             logger.warning(f"Unknown agentic tracer type: {tracer_type}")
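The new `google-adk` branch wires in `GoogleADKInstrumentor` from the openinference Google ADK instrumentation. A minimal, hypothetical sketch of selecting it, assuming the usual `RagaAICatalyst(access_key=..., secret_key=..., base_url=...)` and `Tracer(project_name=..., dataset_name=..., tracer_type=...)` entry points for this package, that `openinference-instrumentation-google-adk` is installed, and with placeholder credentials and names:

```python
# Hypothetical usage sketch; requires the openinference-instrumentation-google-adk package.
from ragaai_catalyst import RagaAICatalyst, Tracer

catalyst = RagaAICatalyst(
    access_key="...",                             # placeholder credentials
    secret_key="...",
    base_url="https://catalyst.example.com/api",  # placeholder endpoint
)

tracer = Tracer(
    project_name="my-project",                    # placeholder names
    dataset_name="adk-traces",
    tracer_type="google-adk",                     # tracer type added in 2.2.5
)
```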
@@ -490,23 +471,13 @@ class Tracer(AgenticTracing):
 
 
     def set_external_id(self, external_id):
-
-
-
-
-
-            'pipeline': self.pipeline,
-            'metadata': self.metadata,
-            'description': self.description,
-            'timeout': self.timeout,
-            'update_llm_cost': self.update_llm_cost,
-            'auto_instrumentation': self.auto_instrumentation,
-            'interval_time': self.interval_time,
-            'max_upload_workers': self.max_upload_workers
-        }
-
+        """
+        This method updates the external_id attribute of the dynamic exporter.
+        Args:
+            external_id (str): The new external_id to set
+        """
         self.dynamic_exporter.external_id = external_id
-        logger.debug(f"Updated external_id to {external_id}")
+        logger.debug(f"Updated dynamic exporter's external_id to {external_id}")
 
     def set_dataset_name(self, dataset_name):
         """
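Since `set_external_id` now simply forwards to the dynamic exporter, traces can be retagged at runtime. Continuing the sketch above with made-up values; `set_dataset_name`, also visible in this hunk, is assumed to behave analogously:

```python
# Hypothetical: retag subsequent traces with caller-supplied identifiers.
tracer.set_external_id("order-1234")       # forwarded to tracer.dynamic_exporter.external_id
tracer.set_dataset_name("orders-dataset")  # companion setter shown in the same diff
```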
@@ -524,106 +495,6 @@ class Tracer(AgenticTracing):
         metadata.setdefault("recorded_on", str(datetime.datetime.now()))
         return metadata
 
-    def _add_unique_key(self, data, key_name):
-        data[key_name] = get_unique_key(data)
-        return data
-
-    def _setup_provider(self):
-        self.filespanx = FileSpanExporter(
-            project_name=self.project_name,
-            metadata=self.metadata,
-            pipeline=self.pipeline,
-            raga_client=self.raga_client,
-        )
-        tracer_provider = trace_sdk.TracerProvider()
-        tracer_provider.add_span_processor(SimpleSpanProcessor(self.filespanx))
-        return tracer_provider
-
-    def _setup_instrumentor(self, tracer_type):
-        instrumentors = {
-            "langchain": LangchainInstrumentor,
-            "openai": OpenAIInstrumentor,
-            "llama_index": LlamaIndexInstrumentor,
-        }
-        if tracer_type not in instrumentors:
-            logger.error(f"Invalid tracer type: {tracer_type}")
-        return instrumentors[tracer_type]().get()
-
-    @contextmanager
-    def trace(self):
-        """
-        Synchronous context manager for tracing.
-        Usage:
-            with tracer.trace():
-                # Your code here
-        """
-        self.start()
-        try:
-            yield self
-        finally:
-            self.stop()
-
-    def start(self):
-        """Start the tracer."""
-        if self.tracer_type == "langchain":
-            super().start()
-            return self
-        elif self.tracer_type == "llamaindex":
-            super().start()
-            return self
-
-            # self.llamaindex_tracer = LlamaIndexInstrumentationTracer(self._pass_user_data())
-            # return self.llamaindex_tracer.start()
-        elif self.tracer_type == "rag/langchain":
-            super().start()
-            return self
-        else:
-            super().start()
-            return self
-
-    def stop(self):
-        """Stop the tracer and initiate trace upload."""
-        if self.tracer_type == "langchain":
-            super().stop()
-            return self
-        elif self.tracer_type == "llamaindex":
-            super().stop()
-            return self
-
-
-            # if self.llamaindex_tracer is None:
-            #     raise ValueError("LlamaIndex tracer was not started")
-
-            # user_detail = self._pass_user_data()
-            # converted_back_to_callback = self.llamaindex_tracer.stop()
-
-            # filepath_3 = os.path.join(os.getcwd(), "llama_final_result.json")
-            # with open(filepath_3, 'w') as f:
-            #     json.dump(converted_back_to_callback, f, default=str, indent=2)
-
-            # # Apply post-processor if registered
-            # if self.post_processor is not None:
-            #     try:
-            #         final_trace_filepath = self.post_processor(filepath_3)
-            #         logger.debug(f"Post-processor applied successfully, new path: {filepath_3}")
-            #     except Exception as e:
-            #         logger.error(f"Error in post-processing: {e}")
-            # else:
-            #     final_trace_filepath = filepath_3
-
-            # if converted_back_to_callback:
-            #     UploadTraces(json_file_path=final_trace_filepath,
-            #                  project_name=self.project_name,
-            #                  project_id=self.project_id,
-            #                  dataset_name=self.dataset_name,
-            #                  user_detail=user_detail,
-            #                  base_url=self.base_url
-            #     ).upload_traces()
-            #     return
-        elif self.tracer_type == "rag/langchain":
-            super().stop()
-        else:
-            super().stop()
 
     def get_upload_status(self):
         """Check the status of the trace upload."""
@@ -638,54 +509,6 @@ class Tracer(AgenticTracing):
             return f"Upload failed: {str(e)}"
         return "Upload in progress..."
 
-    def _run_async(self, coroutine):
-        """Run an asynchronous coroutine in a separate thread."""
-        loop = asyncio.new_event_loop()
-        with ThreadPoolExecutor() as executor:
-            future = executor.submit(lambda: loop.run_until_complete(coroutine))
-            return future
-
-    async def _upload_traces(self):
-        """
-        Asynchronously uploads traces to the RagaAICatalyst server.
-
-        This function uploads the traces generated by the RagaAICatalyst client to the RagaAICatalyst server. It uses the `aiohttp` library to make an asynchronous HTTP request to the server. The function first checks if the `RAGAAI_CATALYST_TOKEN` environment variable is set. If not, it raises a `ValueError` with the message "RAGAAI_CATALYST_TOKEN not found. Cannot upload traces.".
-
-        The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `timeout` attribute of the `Tracer` object.
-
-        If the upload is successful, the function returns the string "Files uploaded successfully" if the `upload_stat` variable is truthy, otherwise it returns the string "No files to upload".
-
-        If the upload times out, the function returns a string with the message "Upload timed out after {self.timeout} seconds".
-
-        If any other exception occurs during the upload, the function returns a string with the message "Upload failed: {str(e)}", where `{str(e)}` is the string representation of the exception.
-
-        Parameters:
-            None
-
-        Returns:
-            A string indicating the status of the upload.
-        """
-        async with aiohttp.ClientSession() as session:
-            if not os.getenv("RAGAAI_CATALYST_TOKEN"):
-                logger.error("RAGAAI_CATALYST_TOKEN not found. Cannot upload traces.")
-
-            try:
-                upload_stat = await asyncio.wait_for(
-                    self.raga_client.check_and_upload_files(
-                        session=session,
-                        file_paths=[self.filespanx.sync_file],
-                    ),
-                    timeout=self.timeout,
-                )
-                return (
-                    "Files uploaded successfully"
-                    if upload_stat
-                    else "No files to upload"
-                )
-            except asyncio.TimeoutError:
-                return f"Upload timed out after {self.timeout} seconds"
-            except Exception as e:
-                return f"Upload failed: {str(e)}"
 
     def _cleanup(self):
         """
@@ -777,13 +600,13 @@ class Tracer(AgenticTracing):
 
         # Create a dynamic exporter that allows property updates
         self.dynamic_exporter = DynamicTraceExporter(
+            project_name=self.project_name,
+            dataset_name=self.dataset_name,
+            base_url=self.base_url,
             tracer_type=self.tracer_type,
             files_to_zip=list_of_unique_files,
-            project_name=self.project_name,
             project_id=self.project_id,
-            dataset_name=self.dataset_name,
             user_details=self.user_details,
-            base_url=self.base_url,
             custom_model_cost=self.model_custom_cost,
             timeout = self.timeout,
             post_processor= self.post_processor,
ragaai_catalyst/tracers/utils/trace_json_converter.py

@@ -188,7 +188,13 @@ def convert_json_format(
         ]
         model_name = next((name for name in reversed(model_names) if name), "")
         if not model_name and span["attributes"].get("openinference.span.kind")=="LLM":
-
+            try:
+                metadata = span["attributes"].get("metadata") or span["attributes"].get("aiq.metadata")
+                metadata = json.loads(metadata)
+                model_name = metadata.get("ls_model_name", "")
+            except Exception as e:
+                model_name = ""
+                logger.error(f"Failed to parse metadata: {e}", exc_info=True)
         if model_name and span["attributes"].get("openinference.span.kind") == "LLM":
             try:
                 model_costs = get_model_cost()
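The new fallback pulls `ls_model_name` out of the span's serialized metadata when no explicit model attribute is present. A standalone sketch of that parsing path on a fabricated span attribute dict:

```python
# Standalone sketch of the ls_model_name fallback on a fabricated span (not real trace output).
import json

attributes = {
    "openinference.span.kind": "LLM",
    "metadata": json.dumps({"ls_provider": "openai", "ls_model_name": "gpt-4o-mini"}),
}

model_name = ""
if attributes.get("openinference.span.kind") == "LLM":
    try:
        raw = attributes.get("metadata") or attributes.get("aiq.metadata")
        model_name = json.loads(raw).get("ls_model_name", "")
    except Exception:
        model_name = ""  # parsing problems leave the name empty, mirroring the diff

print(model_name)  # gpt-4o-mini
```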