ragaai-catalyst 2.2.4b5__py3-none-any.whl → 2.2.4.1b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py

@@ -8,6 +8,9 @@ from urllib.parse import urlparse, urlunparse
  import requests

  logger = logging.getLogger(__name__)
+ logging_level = (
+ logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
+ )

  from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst

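Note: the diagnostics added throughout this release are gated on the DEBUG environment variable, which this hunk reads at import time. A minimal consumer-side sketch (illustrative only; the variable name comes from the hunk above, the handler setup is an assumption):

    import logging
    import os

    # Assumption: DEBUG=1 must be set before ragaai_catalyst modules are imported,
    # since the os.getenv("DEBUG") check in the hunk above runs at import time.
    os.environ["DEBUG"] = "1"
    # A handler is still needed for the debug records to be visible anywhere.
    logging.basicConfig(level=logging.DEBUG)

    import ragaai_catalyst  # noqa: E402  (import after env setup is intentional)
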
@@ -44,6 +47,7 @@ class UploadAgenticTraces:
  "X-Project-Name": self.project_name,
  }

+ logger.debug("Started getting presigned url: ")
  try:
  start_time = time.time()
  endpoint = f"{self.base_url}/v1/llm/presigned-url"
@@ -55,10 +59,11 @@ class UploadAgenticTraces:
  logger.debug(
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
  )
-
- if response.status_code == 200:
+ if response.status_code in [200, 201]:
  presignedURLs = response.json()["data"]["presignedUrls"][0]
+ logger.debug(f"Got presigned url: {presignedURLs}")
  presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
+ logger.debug(f"Updated presigned url: {presignedurl}")
  return presignedurl
  else:
  # If POST fails, try GET
@@ -69,11 +74,13 @@ class UploadAgenticTraces:
  logger.debug(
  f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
  )
- if response.status_code == 200:
+ if response.status_code in [200, 201]:
  presignedURLs = response.json()["data"]["presignedUrls"][0]
+ logger.debug(f"Got presigned url: {presignedURLs}")
  presignedurl = self.update_presigned_url(
  presignedURLs, self.base_url
  )
+ logger.debug(f"Updated presigned url: {presignedurl}")
  return presignedurl
  elif response.status_code == 401:
  logger.warning("Received 401 error. Attempting to refresh token.")
@@ -94,11 +101,13 @@ class UploadAgenticTraces:
  logger.debug(
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
  )
- if response.status_code == 200:
+ if response.status_code in [200, 201]:
  presignedURLs = response.json()["data"]["presignedUrls"][0]
+ logger.debug(f"Got presigned url: {presignedURLs}")
  presignedurl = self.update_presigned_url(
  presignedURLs, self.base_url
  )
+ logger.debug(f"Updated presigned url: {presignedurl}")
  return presignedurl
  else:
  logger.error(
@@ -174,19 +183,22 @@ class UploadAgenticTraces:
  "datasetSpans": self._get_dataset_spans(), # Extra key for agentic traces
  }
  )
+ logger.debug(f"Inserting agentic traces to presigned url: {presignedUrl}")
  try:
  start_time = time.time()
  endpoint = f"{self.base_url}/v1/llm/insert/trace"
  response = requests.request(
  "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
  )
+ logger.debug(f"Payload: {payload}")
+ logger.debug(f"Headers: {headers}")
  elapsed_ms = (time.time() - start_time) * 1000
  logger.debug(
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
  )
- if response.status_code != 200:
- print(f"Error inserting traces: {response.json()['message']}")
- return False
+ if response.status_code in [200, 201]:
+ logger.debug("Successfully inserted traces")
+ return True
  elif response.status_code == 401:
  logger.warning("Received 401 error. Attempting to refresh token.")
  token = RagaAICatalyst.get_token(force_refresh=True)
@@ -206,16 +218,17 @@ class UploadAgenticTraces:
  logger.debug(
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
  )
- if response.status_code != 200:
- print(f"Error inserting traces: {response.json()['message']}")
- return False
+ if response.status_code in [200, 201]:
+ logger.debug("Successfully inserted traces")
+ return True
  else:
- print("Error while inserting traces")
+ logger.debug("Error while inserting traces")
  return False
  else:
- return True
+ logger.debug("Error while inserting traces")
+ return False
  except requests.exceptions.RequestException as e:
- print(f"Error while inserting traces: {e}")
+ logger.debug(f"Error while inserting traces: {e}")
  return None

  def _get_dataset_spans(self):
@@ -223,7 +236,7 @@ class UploadAgenticTraces:
  with open(self.json_file_path) as f:
  data = json.load(f)
  except Exception as e:
- print(f"Error while reading file: {e}")
+ logger.debug(f"Error while reading file: {e}")
  return None
  try:
  spans = data["data"][0]["spans"]
@@ -245,40 +258,41 @@ class UploadAgenticTraces:
  continue
  return dataset_spans
  except Exception as e:
- print(f"Error while reading dataset spans: {e}")
+ logger.debug(f"Error while reading dataset spans: {e}")
  return None

  def upload_agentic_traces(self):
  try:
  presigned_url = self._get_presigned_url()
  if presigned_url is None:
- print("Warning: Failed to obtain presigned URL")
+ logger.debug("Warning: Failed to obtain presigned URL")
  return False

  # Upload the file using the presigned URL
  upload_result = self._put_presigned_url(presigned_url, self.json_file_path)
  if not upload_result:
- print("Error: Failed to upload file to presigned URL")
+ logger.debug("Error: Failed to upload file to presigned URL")
  return False
  elif isinstance(upload_result, tuple):
  response, status_code = upload_result
  if status_code not in [200, 201]:
- print(
+ logger.debug(
  f"Error: Upload failed with status code {status_code}: {response.text if hasattr(response, 'text') else 'Unknown error'}")
  return False
  # Insert trace records
  insert_success = self.insert_traces(presigned_url)
  if not insert_success:
- print("Error: Failed to insert trace records")
+ logger.debug("Error: Failed to insert trace records")
  return False

- print("Successfully uploaded agentic traces")
+ logger.debug("Successfully uploaded agentic traces")
  return True
  except FileNotFoundError:
- print(f"Error: Trace file not found at {self.json_file_path}")
+ logger.debug(f"Error: Trace file not found at {self.json_file_path}")
  return False
  except ConnectionError as e:
- print(f"Error: Network connection failed while uploading traces: {e}")
+ logger.debug(f"Error: Network connection failed while uploading traces: {e}")
  return False
  except Exception as e:
- print(f"Error while uploading agentic traces: {e}")
+ logger.debug(f"Error while uploading agentic traces: {e}")
+ return False
ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py

@@ -19,7 +19,7 @@ from ragaai_catalyst.tracers.agentic_tracing.utils.zip_list_of_unique_files impo
  )
  from ragaai_catalyst.tracers.utils.trace_json_converter import convert_json_format

- logger = logging.getLogger("RagaAICatalyst")
+ logger = logging.getLogger(__name__)
  logging_level = (
  logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
  )
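
Note: because the exporter's logger is now created with logging.getLogger(__name__) instead of the shared "RagaAICatalyst" name, consumers who filter log records by logger name would target the package namespace instead. A hedged sketch (the handler choice is an assumption):

    import logging

    # The module logger is now named after its import path
    # (ragaai_catalyst.tracers.exporters.ragaai_trace_exporter), so configuring the
    # package root logger picks it up through the standard logging hierarchy;
    # filtering on "RagaAICatalyst" no longer would.
    pkg_logger = logging.getLogger("ragaai_catalyst")
    pkg_logger.setLevel(logging.DEBUG)
    pkg_logger.addHandler(logging.StreamHandler())
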
@@ -46,6 +46,7 @@ class RAGATraceExporter(SpanExporter):
  self.external_id = external_id

  def export(self, spans):
+ logger.debug("Starting export of span: ")
  for span in spans:
  try:
  span_json = json.loads(span.to_json())
@@ -60,8 +61,11 @@ class RAGATraceExporter(SpanExporter):

  if span_json["parent_id"] is None:
  trace = self.trace_spans[trace_id]
+ logger.debug("End of trace found: ")
  try:
+ logger.debug("Started processing complete trace: ")
  self.process_complete_trace(trace, trace_id)
+ logger.debug("Completed processing complete trace: ")
  except Exception as e:
  logger.error(f"Error processing complete trace: {e}")
  try:
@@ -84,9 +88,11 @@ class RAGATraceExporter(SpanExporter):
  def process_complete_trace(self, spans, trace_id):
  # Convert the trace to ragaai trace format
  try:
+ logger.debug("Started preparing trace to trace format: ")
  ragaai_trace_details = self.prepare_trace(spans, trace_id)
+ logger.debug("Completed preparing trace to trace format: ")
  except Exception as e:
- print(f"Error converting trace {trace_id}: {e}")
+ logger.error(f"Error preparing trace {trace_id}: {e}")
  return # Exit early if conversion fails

  # Check if trace details are None (conversion failed)
@@ -96,89 +102,109 @@ class RAGATraceExporter(SpanExporter):

  # Upload the trace if upload_trace function is provided
  try:
+ logger.debug("Started uploading trace: ")
  if self.post_processor!=None:
  ragaai_trace_details['trace_file_path'] = self.post_processor(ragaai_trace_details['trace_file_path'])
  self.upload_trace(ragaai_trace_details, trace_id)
+ logger.debug("Completed uploading trace: ")
  except Exception as e:
- print(f"Error uploading trace {trace_id}: {e}")
+ logger.error(f"Error uploading trace {trace_id}: {e}")

  def prepare_trace(self, spans, trace_id):
  try:
  try:
+ logger.debug("Started converting trace to trace format: ")
  ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,self.external_id)
+ logger.debug("Completed converting trace to trace format: ")
  except Exception as e:
- print(f"Error in convert_json_format function: {trace_id}: {e}")
+ logger.error(f"Error in convert_json_format function: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started formatting interactions: ")
  interactions = format_interactions(ragaai_trace)
  ragaai_trace["workflow"] = interactions['workflow']
+ logger.debug("Completed formatting interactions: ")
  except Exception as e:
- print(f"Error in format_interactions function: {trace_id}: {e}")
+ logger.error(f"Error in format_interactions function: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started zipping source code: ")
  # Add source code hash
  hash_id, zip_path = zip_list_of_unique_files(
  self.files_to_zip, output_dir=self.tmp_dir
  )
+ logger.debug("Completed zipping source code: ")
  except Exception as e:
- print(f"Error in zip_list_of_unique_files function: {trace_id}: {e}")
+ logger.error(f"Error in zip_list_of_unique_files function: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started adding system info: ")
  ragaai_trace["metadata"]["system_info"] = asdict(self.system_monitor.get_system_info())
  ragaai_trace["metadata"]["resources"] = asdict(self.system_monitor.get_resources())
+ logger.debug("Completed adding system info: ")
  except Exception as e:
- print(f"Error in get_system_info or get_resources function: {trace_id}: {e}")
+ logger.error(f"Error in get_system_info or get_resources function: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started adding source code hash: ")
  ragaai_trace["metadata"]["system_info"]["source_code"] = hash_id
+ logger.debug("Completed adding source code hash: ")
  except Exception as e:
- print(f"Error in adding source code hash: {trace_id}: {e}")
+ logger.error(f"Error in adding source code hash: {trace_id}: {e}")
  return None

  try:
  ragaai_trace["data"][0]["start_time"] = ragaai_trace["start_time"]
  ragaai_trace["data"][0]["end_time"] = ragaai_trace["end_time"]
  except Exception as e:
- print(f"Error in adding start_time or end_time: {trace_id}: {e}")
+ logger.error(f"Error in adding start_time or end_time: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started adding project name: ")
  ragaai_trace["project_name"] = self.project_name
+ logger.debug("Completed adding project name: ")
  except Exception as e:
- print(f"Error in adding project name: {trace_id}: {e}")
+ logger.error(f"Error in adding project name: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started adding tracer type: ")
  # Add tracer type to the trace
  ragaai_trace["tracer_type"] = self.tracer_type
+ logger.debug("Completed adding tracer type: ")
  except Exception as e:
- print(f"Error in adding tracer type: {trace_id}: {e}")
+ logger.error(f"Error in adding tracer type: {trace_id}: {e}")
  return None

  #Add user passed metadata to the trace
  try:
+ logger.debug("Started adding user passed metadata: ")
  if self.user_details.get("trace_user_detail").get("metadata") and isinstance(self.user_details.get("trace_user_detail").get("metadata"), dict):
  for key, value in self.user_details.get("trace_user_detail").get("metadata").items():
  if key in ["log_source", "recorded_on"]:
  continue
  ragaai_trace["metadata"][key] = value
+ logger.debug("Completed adding user passed metadata: ")
  except Exception as e:
- print(f"Error in adding metadata: {trace_id}: {e}")
+ logger.error(f"Error in adding metadata: {trace_id}: {e}")
  return None

  try:
+ logger.debug("Started saving trace json: ")
  # Save the trace_json
  trace_file_path = os.path.join(self.tmp_dir, f"{trace_id}.json")
  with open(trace_file_path, "w") as file:
  json.dump(ragaai_trace, file, cls=TracerJSONEncoder, indent=2)
  with open(os.path.join(os.getcwd(), 'rag_agent_traces.json'), 'w') as f:
  json.dump(ragaai_trace, f, cls=TracerJSONEncoder, indent=2)
+ logger.debug("Completed saving trace json: ")
  except Exception as e:
- print(f"Error in saving trace json: {trace_id}: {e}")
+ logger.error(f"Error in saving trace json: {trace_id}: {e}")
  return None

  return {
@@ -187,7 +213,7 @@ class RAGATraceExporter(SpanExporter):
  'hash_id': hash_id
  }
  except Exception as e:
- print(f"Error converting trace {trace_id}: {str(e)}")
+ logger.error(f"Error converting trace {trace_id}: {str(e)}")
  return None

  def upload_trace(self, ragaai_trace_details, trace_id):
ragaai_catalyst/tracers/tracer.py

@@ -127,9 +127,11 @@ class Tracer(AgenticTracing):
  self.model_custom_cost = {}
  super().__init__(user_detail=user_detail, auto_instrumentation=auto_instrumentation)

+ logger.debug(f"Setting up Tracer instance with project: {project_name}, dataset: {dataset_name}")
  self.project_name = project_name
  self.dataset_name = dataset_name
  self.tracer_type = tracer_type
+ logger.debug(f"Tracer type set to: {tracer_type}")
  self.metadata = self._improve_metadata(metadata, tracer_type)
  # self.metadata["total_cost"] = 0.0
  # self.metadata["total_tokens"] = 0
@@ -146,12 +148,16 @@ class Tracer(AgenticTracing):
  self.file_tracker = TrackName()
  self.post_processor = None
  self.max_upload_workers = max_upload_workers
+ logger.debug(f"Max upload workers: {self.max_upload_workers}")
  self.user_details = self._pass_user_data()
  self.update_llm_cost = update_llm_cost
  self.auto_instrumentation = auto_instrumentation
  self.external_id = external_id
+ if external_id:
+ logger.debug(f"External ID provided: {external_id}")

  try:
+ logger.debug(f"Fetching projects list from {self.base_url}/v2/llm/projects")
  response = requests.get(
  f"{self.base_url}/v2/llm/projects?size={self.num_projects}",
  headers={
@@ -165,8 +171,13 @@ class Tracer(AgenticTracing):
  project_list = [
  project["name"] for project in response.json()["data"]["content"]
  ]
+ logger.debug(f"Found {len(project_list)} projects")
  if project_name not in project_list:
- logger.error("Project not found. Please enter a valid project name")
+ logger.warning(f"Project '{project_name}' not found in available projects")
+ logger.debug(f"Available projects: {project_list}")
+ else:
+ logger.debug(f"Project '{project_name}' found in available projects")
+

  self.project_id = [
  project["id"] for project in response.json()["data"]["content"] if project["name"] == project_name
@@ -177,6 +188,7 @@ class Tracer(AgenticTracing):

  except requests.exceptions.RequestException as e:
  logger.error(f"Failed to retrieve projects list: {e}")
+ logger.debug(f"Request exception details: {str(e)}, URL: {self.base_url}/v2/llm/projects")

  # if tracer_type == "langchain":
  # instrumentors = []
@@ -191,13 +203,16 @@ class Tracer(AgenticTracing):
  # from openinference.instrumentation.langchain import LangChainInstrumentor
  # instrumentors += [(LangChainInstrumentor, [])]
  # self._setup_agentic_tracer(instrumentors)
+
  # Handle agentic tracers
+ logger.debug(f"Starting Instrumentation for tracer type: {tracer_type}")
  if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain":
  # Setup instrumentors based on tracer type
  instrumentors = []

  # Add LLM Instrumentors
  if tracer_type in ['agentic/crewai']:
+ logger.debug("Setting up instrumentors for CrewAI")
  try:
  from openinference.instrumentation.vertexai import VertexAIInstrumentor
  instrumentors.append((VertexAIInstrumentor, []))
@@ -310,31 +325,38 @@ class Tracer(AgenticTracing):
  elif tracer_type == "agentic/llamaindex" or tracer_type == "llamaindex":
  from openinference.instrumentation.llama_index import LlamaIndexInstrumentor
  instrumentors += [(LlamaIndexInstrumentor, [])]
+ logger.info("Instrumenting LlamaIndex...")

  elif tracer_type == "agentic/langchain" or tracer_type == "agentic/langgraph" or tracer_type == "langchain":
  from openinference.instrumentation.langchain import LangChainInstrumentor
  instrumentors += [(LangChainInstrumentor, [])]
+ logger.info("Instrumenting LangChain...")

  elif tracer_type == "agentic/crewai":
  from openinference.instrumentation.crewai import CrewAIInstrumentor
  from openinference.instrumentation.langchain import LangChainInstrumentor
  instrumentors += [(CrewAIInstrumentor, []), (LangChainInstrumentor, [])]
+ logger.info("Instrumenting CrewAI...")

  elif tracer_type == "agentic/haystack":
  from openinference.instrumentation.haystack import HaystackInstrumentor
  instrumentors += [(HaystackInstrumentor, [])]
+ logger.info("Instrumenting Haystack...")

  elif tracer_type == "agentic/autogen":
  from openinference.instrumentation.autogen import AutogenInstrumentor
  instrumentors += [(AutogenInstrumentor, [])]
+ logger.info("Instrumenting Autogen...")

  elif tracer_type == "agentic/smolagents":
  from openinference.instrumentation.smolagents import SmolagentsInstrumentor
  instrumentors += [(SmolagentsInstrumentor, [])]
+ logger.info("Instrumenting Smolagents...")

  elif tracer_type == "agentic/openai_agents":
  from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
  instrumentors += [(OpenAIAgentsInstrumentor, [])]
+ logger.info("Instrumenting OpenAI Agents...")

  else:
  # Unknown agentic tracer type
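
Note: combined with the DEBUG gate above, the new debug/info calls make the Tracer setup steps observable. A usage sketch (the constructor arguments shown are assumptions inferred from attributes visible in this diff, not the full signature):

    import logging
    import os

    os.environ["DEBUG"] = "1"                 # surface the new logger.debug messages
    logging.basicConfig(level=logging.DEBUG)

    from ragaai_catalyst import Tracer        # import path assumed

    # Construction now logs the project/dataset being set up, the tracer type,
    # the project-list fetch, and "Instrumenting LangChain..." for this type.
    tracer = Tracer(
        project_name="my-project",
        dataset_name="my-dataset",
        tracer_type="agentic/langchain",
    )
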
ragaai_catalyst-2.2.4.1b2.dist-info/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: ragaai_catalyst
- Version: 2.2.4b5
+ Version: 2.2.4.1b2
  Summary: RAGA AI CATALYST
  Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
  Requires-Python: <=3.13.2,>=3.10
ragaai_catalyst-2.2.4.1b2.dist-info/RECORD

@@ -31,7 +31,7 @@ ragaai_catalyst/tracers/distributed.py,sha256=CGPuOh4CsgEk428PPibieLaAG2Tt3BVygF
  ragaai_catalyst/tracers/langchain_callback.py,sha256=CB75zzG3-DkYTELj0vI1MOHQTY0MuQJfoHIXz9Cl8S8,34568
  ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
  ragaai_catalyst/tracers/llamaindex_instrumentation.py,sha256=Ys_jLkvVqo12bKgXDmkp4TxJu9HkBATrFE8cIcTYxWw,14329
- ragaai_catalyst/tracers/tracer.py,sha256=hxVJN45CtIeU8Dc5G1kreXJa7Vv_3buBAHVz-Q3buKo,41435
+ ragaai_catalyst/tracers/tracer.py,sha256=2JmeM9OkxNyS6XG0Ogqhx8q9lvy3vNLJnEVH2Vg2RNM,42768
  ragaai_catalyst/tracers/upload_traces.py,sha256=w1clGGfdOMpStUJX40NAlxe6dcFdN4pwcezyII0bGYA,6994
  ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
  ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
@@ -55,7 +55,7 @@ ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=xxrliKPfdf
  ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py,sha256=bhSUhNQCuJXKjgJAXhjKEYjnHMpYN90FSZdR84fNIKU,4614
  ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=iMUMFR9XVipCBunpv8_No8bCoP3lqG47M5dg-ugibWo,21006
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=t3spo5w7TyfR0Zeqm1h5Z-bJ-BlZ3EPGTvRdK5lpFpE,11705
+ ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=pMaZdbKzGnT_BTP3ib_SVkGJsx4_Tz67BOVkoqtx7xY,12650
  ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=2mxdi7k_SoDqQUFo1oQ__28CpmSIvVugYcbuRltUK9Q,9920
  ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py,sha256=m1O8lKpxKwtHofXLW3fTHX5yfqDW5GxoveARlg5cTw4,2571
  ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=XdB3X_ufe4RVvGorxSqAiB9dYv4UD7Hvvuw3bsDUppY,60
@@ -76,7 +76,7 @@ ragaai_catalyst/tracers/exporters/__init__.py,sha256=wQbaqyeIjVZxYprHCKZ9BeiqxeX
  ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=Rm-QaLv1qMAKpHKcFOcK_HWaKHwFBoUH45_4QYipE-g,6843
  ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=NZsD3rShUiC3rO9y3Y2vqEtS3MO51FXZy0p3q9cdDNY,6403
  ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=l-RfysTIXYxtvYkVlJbRvg-AzJbT4Fdb-YiZh0mfuDs,17868
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=VLvlWFRFPhE32WrF-_J_vCczduz13WAcOW8MKDgDYJc,8979
+ ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=H6nQBjUTyraPxSPj8y71sslng3yz_y-LbbmbX3FDmRM,10758
  ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
  ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=SehrD7q8ytAiUYoWr406b4mWs3Lk0Rcy6Ekkihh22TI,1703
@@ -88,8 +88,8 @@ ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py,sha256=3ygkRT__lLDRf
  ragaai_catalyst/tracers/utils/rag_trace_json_converter.py,sha256=54IEZO-YRjUAahV5nw8KClXqTF1LhfDry_TsZ4KGow4,20467
  ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=-HZVmijeUFLO7e9OAvi1RJdWVTxPRUHPd1MkKQlCD54,11785
  ragaai_catalyst/tracers/utils/utils.py,sha256=o-p9n2ZuophdrV0wrixu-BqRHCkovup_klc3mS8mU8g,2374
- ragaai_catalyst-2.2.4b5.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- ragaai_catalyst-2.2.4b5.dist-info/METADATA,sha256=Q7PtQXhDh171npPl7svseoiSvBfEcNevud0mDWqHx0g,17679
- ragaai_catalyst-2.2.4b5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- ragaai_catalyst-2.2.4b5.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
- ragaai_catalyst-2.2.4b5.dist-info/RECORD,,
+ ragaai_catalyst-2.2.4.1b2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ ragaai_catalyst-2.2.4.1b2.dist-info/METADATA,sha256=cy7x1DGQMU6DB3k9hHnh8gGxFatGgm-664RTigK2ScE,17681
+ ragaai_catalyst-2.2.4.1b2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ ragaai_catalyst-2.2.4.1b2.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ ragaai_catalyst-2.2.4.1b2.dist-info/RECORD,,