ragaai-catalyst 2.1.7.5b5__py3-none-any.whl → 2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +3 -13
- ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +18 -17
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +12 -44
- ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +20 -5
- ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +22 -1
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +28 -97
- ragaai_catalyst/tracers/tracer.py +105 -134
- ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py +205 -0
- ragaai_catalyst/tracers/utils/rag_trace_json_converter.py +223 -189
- ragaai_catalyst/tracers/utils/trace_json_converter.py +118 -200
- {ragaai_catalyst-2.1.7.5b5.dist-info → ragaai_catalyst-2.2.dist-info}/METADATA +1 -1
- {ragaai_catalyst-2.1.7.5b5.dist-info → ragaai_catalyst-2.2.dist-info}/RECORD +15 -15
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py +0 -114
- {ragaai_catalyst-2.1.7.5b5.dist-info → ragaai_catalyst-2.2.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.1.7.5b5.dist-info → ragaai_catalyst-2.2.dist-info}/licenses/LICENSE +0 -0
- {ragaai_catalyst-2.1.7.5b5.dist-info → ragaai_catalyst-2.2.dist-info}/top_level.txt +0 -0
ragaai_catalyst/tracers/agentic_tracing/tracers/base.py

```diff
@@ -1269,17 +1269,7 @@ class BaseTracer:
             )
             print(f"Schema created: {response}")
 
-            # 2. Upload trace metrics
-            print("Uploading trace metrics...")
-            from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
-            response = upload_trace_metric(
-                json_file_path=trace_file,
-                dataset_name=self.dataset_name,
-                project_name=self.project_name,
-            )
-            print(f"Metrics uploaded: {response}")
-
-            # 3. Get code hash and zip path if available
+            # 2. Get code hash and zip path if available
             code_hash = None
             zip_path = None
             try:
```
```diff
@@ -1293,7 +1283,7 @@ class BaseTracer:
             except Exception as e:
                 print(f"Error getting code hash: {e}")
 
-            # 4. Upload agentic traces
+            # 3. Upload agentic traces
             print("Uploading agentic traces...")
             from ragaai_catalyst.tracers.agentic_tracing.upload.upload_agentic_traces import UploadAgenticTraces
             from ragaai_catalyst import RagaAICatalyst
```
```diff
@@ -1308,7 +1298,7 @@ class BaseTracer:
             upload_traces.upload_agentic_traces()
             print("Agentic traces uploaded successfully")
 
-            # 5. Upload code hash if available
+            # 4. Upload code hash if available
             if code_hash and zip_path and os.path.exists(zip_path):
                 print(f"Uploading code hash: {code_hash}")
                 from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
```
ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py

```diff
@@ -45,7 +45,7 @@ logger = logging.getLogger("trace_uploader")
 try:
     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_agentic_traces import UploadAgenticTraces
     from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code
-    from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
+    # from ragaai_catalyst.tracers.agentic_tracing.upload.upload_trace_metric import upload_trace_metric
    from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import create_dataset_schema_with_trace
     from ragaai_catalyst import RagaAICatalyst
     IMPORTS_AVAILABLE = True
```
```diff
@@ -137,6 +137,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
             dataset_name=dataset_name,
             project_name=project_name,
             base_url=base_url,
+            user_details=user_details,
             timeout=timeout
         )
         logger.info(f"Dataset schema created: {response}")
```
```diff
@@ -145,22 +146,22 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
         # Continue with other steps
 
     # Step 2: Upload trace metrics
-    if filepath and os.path.exists(filepath):
-        logger.info(f"Uploading trace metrics for {filepath} with base_url: {base_url} and timeout: {timeout}")
-        try:
-            response = upload_trace_metric(
-                json_file_path=filepath,
-                dataset_name=dataset_name,
-                project_name=project_name,
-                base_url=base_url,
-                timeout=timeout
-            )
-            logger.info(f"Trace metrics uploaded: {response}")
-        except Exception as e:
-            logger.error(f"Error uploading trace metrics: {e}")
-            # Continue with other uploads
-    else:
-        logger.warning(f"Trace file {filepath} not found, skipping metrics upload")
+    # if filepath and os.path.exists(filepath):
+    #     logger.info(f"Uploading trace metrics for {filepath} with base_url: {base_url} and timeout: {timeout}")
+    #     try:
+    #         response = upload_trace_metric(
+    #             json_file_path=filepath,
+    #             dataset_name=dataset_name,
+    #             project_name=project_name,
+    #             base_url=base_url,
+    #             timeout=timeout
+    #         )
+    #         logger.info(f"Trace metrics uploaded: {response}")
+    #     except Exception as e:
+    #         logger.error(f"Error uploading trace metrics: {e}")
+    #         # Continue with other uploads
+    # else:
+    #     logger.warning(f"Trace file {filepath} not found, skipping metrics upload")
 
     # Step 3: Upload agentic traces
     if filepath and os.path.exists(filepath):
```
ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py

```diff
@@ -163,55 +163,23 @@ class UploadAgenticTraces:
             return None
         try:
             spans = data["data"][0]["spans"]
-            datasetSpans = []
+            dataset_spans = []
             for span in spans:
-                …
-            datasetSpans = [dict(t) for t in set(tuple(sorted(d.items())) for d in datasetSpans)]
-
-            return datasetSpans
+                try:
+                    dataset_spans.append({
+                        "spanId": span.get("context", {}).get("span_id", ""),
+                        "spanName": span.get("name", ""),
+                        "spanHash": span.get("hash_id", ""),
+                        "spanType": span.get("attributes", {}).get("openinference.span.kind", ""),
+                    })
+                except Exception as e:
+                    logger.warning(f"Error processing span: {e}")
+                    continue
+            return dataset_spans
         except Exception as e:
             print(f"Error while reading dataset spans: {e}")
             return None
 
-    def _get_agent_dataset_spans(self, span, datasetSpans):
-        datasetSpans.append({
-            "spanId": span["id"],
-            "spanName": span["name"],
-            "spanHash": span["hash_id"],
-            "spanType": span["type"],
-        })
-        children = span["data"]["children"]
-        for child in children:
-            if child["type"] != "agent":
-                existing_span = next((s for s in datasetSpans if s["spanHash"] == child["hash_id"]), None)
-                if existing_span is None:
-                    datasetSpans.append({
-                        "spanId": child["id"],
-                        "spanName": child["name"],
-                        "spanHash": child["hash_id"],
-                        "spanType": child["type"],
-                    })
-            else:
-                datasetSpans.append({
-                    "spanId": child["id"],
-                    "spanName": child["name"],
-                    "spanHash": child["hash_id"],
-                    "spanType": child["type"],
-                })
-                self._get_agent_dataset_spans(child, datasetSpans)
-        return datasetSpans
-
-
     def upload_agentic_traces(self):
         try:
             presignedUrl = self._get_presigned_url()
```
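The rewrite drops the recursive walk over `span["data"]["children"]` (and the tuple-based deduplication pass) in favor of a flat loop that reads OpenInference-style attributes off each span. A minimal sketch of the new extraction, using invented sample spans to show the input shape it expects:

```python
import logging

logger = logging.getLogger(__name__)

# Invented sample spans mirroring the keys the new code reads.
spans = [
    {"context": {"span_id": "a1b2"}, "name": "retrieve_docs", "hash_id": "h-001",
     "attributes": {"openinference.span.kind": "RETRIEVER"}},
    {"context": {"span_id": "c3d4"}, "name": "generate_answer", "hash_id": "h-002",
     "attributes": {"openinference.span.kind": "LLM"}},
]

dataset_spans = []
for span in spans:
    try:
        dataset_spans.append({
            "spanId": span.get("context", {}).get("span_id", ""),
            "spanName": span.get("name", ""),
            "spanHash": span.get("hash_id", ""),
            "spanType": span.get("attributes", {}).get("openinference.span.kind", ""),
        })
    except Exception as e:  # a malformed span is skipped instead of aborting the listing
        logger.warning(f"Error processing span: {e}")
        continue

print(dataset_spans[0]["spanType"])  # RETRIEVER
```

Because `.get()` defaults every field to an empty string, missing keys no longer raise, and the explicit deduplication step from the old version is gone.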
ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py

```diff
@@ -4,17 +4,32 @@ import re
 import requests
 from ragaai_catalyst.tracers.agentic_tracing.tracers.base import RagaAICatalyst
 
-def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, timeout=120):
+def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, user_details=None, timeout=120):
+    SCHEMA_MAPPING = {}
+    metadata = user_details.get("trace_user_detail").get("metadata")
+    if metadata and isinstance(metadata, dict):
+        for key, value in metadata.items():
+            if key in ["log_source", "recorded_on"]:
+                continue
+            SCHEMA_MAPPING[key] = {"columnType": "metadata"}
+
     def make_request():
         headers = {
             "Content-Type": "application/json",
             "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
             "X-Project-Name": project_name,
         }
-        payload = json.dumps({
-            "datasetName": dataset_name,
-            "traceFolderUrl": None,
-        })
+        if SCHEMA_MAPPING:
+            payload = json.dumps({
+                "datasetName": dataset_name,
+                "traceFolderUrl": None,
+                "schemaMapping": SCHEMA_MAPPING
+            })
+        else:
+            payload = json.dumps({
+                "datasetName": dataset_name,
+                "traceFolderUrl": None,
+            })
         # Use provided base_url or fall back to default
         url_base = base_url if base_url is not None else RagaAICatalyst.BASE_URL
         response = requests.request("POST",
```
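A small sketch of what the new schema-mapping logic produces for a given `user_details` payload (sample values invented; the key names follow the diff). Note that as written the function dereferences `user_details.get("trace_user_detail")` unconditionally, so callers are expected to pass a dict even though the parameter defaults to `None`:

```python
import json

# Invented example of the user_details structure the function reads.
user_details = {
    "trace_user_detail": {
        "metadata": {
            "environment": "staging",     # becomes a metadata column
            "log_source": "sdk",          # reserved key, skipped
            "recorded_on": "2025-01-01",  # reserved key, skipped
        }
    }
}

SCHEMA_MAPPING = {}
metadata = user_details.get("trace_user_detail").get("metadata")
if metadata and isinstance(metadata, dict):
    for key, value in metadata.items():
        if key in ["log_source", "recorded_on"]:
            continue
        SCHEMA_MAPPING[key] = {"columnType": "metadata"}

# Mirrors the request body built in make_request() when SCHEMA_MAPPING is non-empty.
payload = json.dumps({
    "datasetName": "demo_dataset",  # hypothetical name
    "traceFolderUrl": None,
    "schemaMapping": SCHEMA_MAPPING,
})
print(payload)
# {"datasetName": "demo_dataset", "traceFolderUrl": null,
#  "schemaMapping": {"environment": {"columnType": "metadata"}}}
```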
ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py

```diff
@@ -14,7 +14,7 @@ class DynamicTraceExporter(SpanExporter):
     certain properties to be updated dynamically during execution.
     """
 
-    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, external_id=None):
+    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
         """
         Initialize the DynamicTraceExporter.
 
```
```diff
@@ -42,6 +42,7 @@ class DynamicTraceExporter(SpanExporter):
             post_processor= post_processor,
             max_upload_workers = max_upload_workers,
             user_context = user_context,
+            user_gt = user_gt,
             external_id=external_id
         )
 
```
```diff
@@ -56,6 +57,8 @@ class DynamicTraceExporter(SpanExporter):
         self._post_processor = post_processor
         self._max_upload_workers = max_upload_workers
         self._user_context = user_context
+        self._user_gt = user_gt
+        self._external_id = external_id
 
 
     def export(self, spans):
```
```diff
@@ -115,6 +118,8 @@ class DynamicTraceExporter(SpanExporter):
         self._exporter.post_processor = self._post_processor
         self._exporter.max_upload_workers = self._max_upload_workers
         self._exporter.user_context = self._user_context
+        self._exporter.user_gt = self._user_gt
+        self._exporter.external_id = self._external_id
 
     # Getter and setter methods for dynamic properties
 
```
```diff
@@ -190,3 +195,19 @@ class DynamicTraceExporter(SpanExporter):
     def user_context(self, value):
         self._user_context = value
 
+    @property
+    def user_gt(self):
+        return self._user_gt
+
+    @user_gt.setter
+    def user_gt(self, value):
+        self._user_gt = value
+
+    @property
+    def external_id(self):
+        return self._external_id
+
+    @external_id.setter
+    def external_id(self, value):
+        self._external_id = value
+
```
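`user_gt` and `external_id` follow the exporter's existing pattern for dynamic properties: a private attribute, a property/setter pair, and a sync step that copies the current values onto the wrapped `RAGATraceExporter` before exporting. A stripped-down sketch of that pattern (class and helper names here are illustrative, not the library's):

```python
class _Inner:
    """Stand-in for the wrapped RAGATraceExporter."""
    user_gt = None
    external_id = None

class _DynamicSketch:
    def __init__(self, user_gt=None, external_id=None):
        self._user_gt = user_gt
        self._external_id = external_id
        self._exporter = _Inner()

    @property
    def user_gt(self):
        return self._user_gt

    @user_gt.setter
    def user_gt(self, value):
        self._user_gt = value

    def _sync_exporter(self):
        # Mirrors the diff: current values are pushed down to the inner exporter.
        self._exporter.user_gt = self._user_gt
        self._exporter.external_id = self._external_id

    def export(self, spans):
        self._sync_exporter()
        # ... the real class then delegates to self._exporter.export(spans) ...

exp = _DynamicSketch()
exp.user_gt = "reference answer"  # can be updated mid-run
exp.export([])
assert exp._exporter.user_gt == "reference answer"
```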
ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py

```diff
@@ -26,7 +26,7 @@ logging_level = (
 
 
 class RAGATraceExporter(SpanExporter):
-    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, external_id=None):
+    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
         self.trace_spans = dict()
         self.tmp_dir = tempfile.gettempdir()
         self.tracer_type = tracer_type
```
```diff
@@ -42,6 +42,7 @@ class RAGATraceExporter(SpanExporter):
         self.post_processor = post_processor
         self.max_upload_workers = max_upload_workers
         self.user_context = user_context
+        self.user_gt = user_gt
         self.external_id = external_id
 
     def export(self, spans):
```
```diff
@@ -82,10 +83,7 @@ class RAGATraceExporter(SpanExporter):
     def process_complete_trace(self, spans, trace_id):
         # Convert the trace to ragaai trace format
         try:
-            if …:
-                ragaai_trace_details, additional_metadata = self.prepare_rag_trace(spans, trace_id)
-            else:
-                ragaai_trace_details = self.prepare_trace(spans, trace_id)
+            ragaai_trace_details = self.prepare_trace(spans, trace_id)
         except Exception as e:
             print(f"Error converting trace {trace_id}: {e}")
             return  # Exit early if conversion fails
```
```diff
@@ -97,33 +95,16 @@ class RAGATraceExporter(SpanExporter):
 
         # Upload the trace if upload_trace function is provided
         try:
-            if self.post_processor!=None
+            if self.post_processor!=None:
                 ragaai_trace_details['trace_file_path'] = self.post_processor(ragaai_trace_details['trace_file_path'])
-
-            if …:
-                try:
-                    loop = asyncio.get_event_loop()
-                    if loop.is_running():
-                        # We're in a running event loop (like in Colab/Jupyter)
-                        # Create a future and run the coroutine
-                        future = asyncio.ensure_future(self.upload_rag_trace(ragaai_trace_details, additional_metadata, trace_id, self.post_processor))
-                        # We don't wait for it to complete as this would block the event loop
-                        logger.info(f"Scheduled async upload for trace {trace_id} in existing event loop")
-                    else:
-                        # No running event loop, use asyncio.run()
-                        asyncio.run(self.upload_rag_trace(ragaai_trace_details, additional_metadata, trace_id, self.post_processor))
-                except RuntimeError:
-                    # No event loop exists, create one
-                    asyncio.run(self.upload_rag_trace(ragaai_trace_details, additional_metadata, trace_id, self.post_processor))
-            else:
-                self.upload_trace(ragaai_trace_details, trace_id)
-        except Exception as e:
+            self.upload_trace(ragaai_trace_details, trace_id)
+        except Exception as e:
             print(f"Error uploading trace {trace_id}: {e}")
 
     def prepare_trace(self, spans, trace_id):
         try:
             try:
-                ragaai_trace = convert_json_format(spans, self.custom_model_cost)
+                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,self.external_id)
             except Exception as e:
                 print(f"Error in convert_json_format function: {trace_id}: {e}")
                 return None
```
```diff
@@ -169,12 +150,32 @@ class RAGATraceExporter(SpanExporter):
         except Exception as e:
             print(f"Error in adding project name: {trace_id}: {e}")
             return None
+
+        try:
+            # Add tracer type to the trace
+            ragaai_trace["tracer_type"] = self.tracer_type
+        except Exception as e:
+            print(f"Error in adding tracer type: {trace_id}: {e}")
+            return None
+
+        #Add user passed metadata to the trace
+        try:
+            if self.user_details.get("trace_user_detail").get("metadata") and isinstance(self.user_details.get("trace_user_detail").get("metadata"), dict):
+                for key, value in self.user_details.get("trace_user_detail").get("metadata").items():
+                    if key in ["log_source", "recorded_on"]:
+                        continue
+                    ragaai_trace["metadata"][key] = value
+        except Exception as e:
+            print(f"Error in adding metadata: {trace_id}: {e}")
+            return None
 
         try:
             # Save the trace_json
             trace_file_path = os.path.join(self.tmp_dir, f"{trace_id}.json")
             with open(trace_file_path, "w") as file:
                 json.dump(ragaai_trace, file, cls=TracerJSONEncoder, indent=2)
+            with open(os.path.join(os.getcwd(), 'rag_agent_traces.json'), 'w') as f:
+                json.dump(ragaai_trace, f, cls=TracerJSONEncoder, indent=2)
         except Exception as e:
             print(f"Error in saving trace json: {trace_id}: {e}")
             return None
```
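The metadata injection added here is the trace-side counterpart of the schema mapping above: user-supplied keys land in `ragaai_trace["metadata"]`, while the tracer keeps ownership of `log_source` and `recorded_on`. A tiny sketch with invented values:

```python
ragaai_trace = {"metadata": {"recorded_on": "2025-01-01T00:00:00"}}
user_metadata = {"environment": "staging", "log_source": "sdk"}

for key, value in user_metadata.items():
    if key in ["log_source", "recorded_on"]:
        continue  # reserved keys are never overwritten by user metadata
    ragaai_trace["metadata"][key] = value

print(ragaai_trace["metadata"])
# {'recorded_on': '2025-01-01T00:00:00', 'environment': 'staging'}
```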
```diff
@@ -204,74 +205,4 @@ class RAGATraceExporter(SpanExporter):
             timeout=self.timeout
         )
 
-        logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
-
-    async def upload_rag_trace(self, ragaai_trace, additional_metadata, trace_id, post_processor=None):
-        try:
-            ragaai_trace[0]['external_id'] = self.external_id
-            trace_file_path = os.path.join(self.tmp_dir, f"{trace_id}.json")
-            with open(trace_file_path, 'w') as f:
-                json.dump(ragaai_trace, f, indent=2)
-            logger.info(f"Trace file saved at {trace_file_path}")
-            if self.post_processor!=None:
-                trace_file_path = self.post_processor(trace_file_path)
-                logger.info(f"After post processing Trace file saved at {trace_file_path}")
-
-            # Create a ThreadPoolExecutor with max_workers=30
-            with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_upload_workers) as executor:
-                # Create a partial function with all the necessary arguments
-                upload_func = partial(
-                    UploadTraces(
-                        json_file_path=trace_file_path,
-                        project_name=self.project_name,
-                        project_id=self.project_id,
-                        dataset_name=self.dataset_name,
-                        user_detail=self.user_details,
-                        base_url=self.base_url
-                    ).upload_traces,
-                    additional_metadata_keys=additional_metadata
-                )
-
-                # Implement retry logic - attempt upload up to 3 times
-                max_retries = 3
-                retry_count = 0
-                last_exception = None
-
-                while retry_count < max_retries:
-                    try:
-                        # Submit the task to the executor and get a future
-                        loop = asyncio.get_event_loop()
-                        await loop.run_in_executor(executor, upload_func)
-
-                        logger.info(f"Successfully uploaded rag trace {trace_id} on attempt {retry_count + 1}")
-                        return  # Exit the method if upload is successful
-                    except Exception as e:
-                        retry_count += 1
-                        last_exception = e
-                        logger.warning(f"Attempt {retry_count} to upload rag trace {trace_id} failed: {str(e)}")
-
-                        if retry_count < max_retries:
-                            # Add a small delay before retrying (exponential backoff)
-                            await asyncio.sleep(2 ** retry_count)  # 2, 4, 8 seconds
-
-                # If we've exhausted all retries, log the error
-                logger.error(f"Failed to upload rag trace {trace_id} after {max_retries} attempts. Last error: {str(last_exception)}")
-        except Exception as e:
-            logger.error(f"Error preparing rag trace {trace_id} for upload: {str(e)}")
-
-    def prepare_rag_trace(self, spans, trace_id):
-        try:
-            ragaai_trace, additional_metadata = rag_trace_json_converter(spans, self.custom_model_cost, trace_id, self.user_details, self.tracer_type,self.user_context)
-            ragaai_trace["metadata"]["recorded_on"] = datetime.datetime.now().astimezone().isoformat()
-            ragaai_trace["metadata"]["log_source"] = "langchain_tracer"
-
-            if True:
-                converted_ragaai_trace = convert_langchain_callbacks_output(ragaai_trace, self.project_name, ragaai_trace["metadata"], ragaai_trace["pipeline"])
-            else:
-                converted_ragaai_trace = ragaai_trace
-
-            return converted_ragaai_trace, additional_metadata
-
-        except Exception as e:
-            logger.error(f"Error converting trace {trace_id}: {str(e)}")
-            return None
+        logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
```