ragaai-catalyst 2.2.5b4__py3-none-any.whl → 2.2.5b6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -240,12 +240,15 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
             user_details=user_details,
             timeout=timeout
         )
-        logger.info(f"Dataset schema created: {response}")
 
-        # Cache the response only if status code is 200
-        if response and hasattr(response, 'status_code') and response.status_code in [200, 201]:
+        if response is None:
+            logger.error(f"Dataset schema creation failed for {dataset_name} - received None response")
+        elif hasattr(response, 'status_code') and response.status_code in [200, 201]:
+            logger.info(f"Dataset schema created successfully: {response.status_code}")
            _cache_dataset_creation(cache_key, response)
            logger.info(f"Response cached successfully for dataset: {dataset_name} and key: {cache_key}")
+        else:
+            logger.warning(f"Dataset schema creation returned unexpected response: {response}")
 
    except Exception as e:
        logger.error(f"Error creating dataset schema: {e}")
@@ -478,7 +481,8 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
 
        return task_id
    except RuntimeError as e:
-        if "cannot schedule new futures after shutdown" in str(e):
+        if any(msg in str(e) for msg in
+               ("cannot schedule new futures after shutdown", "cannot schedule new futures after interpreter shutdown")):
            logger.warning(f"Executor already shut down, falling back to synchronous processing: {e}")
            return do_sync_processing()
        else:
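The broadened check matches both messages CPython's ThreadPoolExecutor.submit can raise once the pool has been shut down or the interpreter is exiting. A self-contained sketch of the same fallback, with do_sync_processing replaced by a direct call:

    from concurrent.futures import ThreadPoolExecutor

    _SHUTDOWN_MESSAGES = (
        "cannot schedule new futures after shutdown",
        "cannot schedule new futures after interpreter shutdown",
    )

    def submit_or_run(executor, fn, *args):
        # Prefer async execution; degrade to a synchronous call if the pool is gone.
        try:
            return executor.submit(fn, *args)
        except RuntimeError as e:
            if any(msg in str(e) for msg in _SHUTDOWN_MESSAGES):
                return fn(*args)
            raise

    pool = ThreadPoolExecutor(max_workers=1)
    pool.shutdown()
    print(submit_or_run(pool, lambda x: x + 1, 41))  # 42, ran synchronously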
@@ -9,8 +9,6 @@ from requests.exceptions import ConnectionError, Timeout, RequestException
 from http.client import RemoteDisconnected
 from .session_manager import session_manager
 
-import requests
-
 logger = logging.getLogger(__name__)
 
 from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
@@ -64,8 +62,34 @@ class UploadAgenticTraces:
                presignedURLs = response.json()["data"]["presignedUrls"][0]
                presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
                return presignedurl
+            elif response.status_code == 401:
+                logger.warning("Received 401 error while getting presign url. Attempting to refresh token.")
+                token = RagaAICatalyst.get_token(force_refresh=True)
+                headers = {
+                    "Content-Type": "application/json",
+                    "Authorization": f"Bearer {token}",
+                    "X-Project-Name": self.project_name,
+                }
+                response = session_manager.make_request_with_retry(
+                    "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
+                )
+                elapsed_ms = (time.time() - start_time) * 1000
+                logger.debug(
+                    f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
+                )
+                if response.status_code == 200:
+                    presignedURLs = response.json()["data"]["presignedUrls"][0]
+                    presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
+                    return presignedurl
+                else:
+                    logger.error(
+                        f"Error while getting presigned url after token refresh: {response.json()['message']}"
+                    )
+                    return None
            else:
                # If POST fails, try GET
+                logger.warning(
+                    f"POST request failed for getting presign url with status {response.status_code}. Falling back to GET request.")
                response = session_manager.make_request_with_retry(
                    "GET", endpoint, headers=headers, data=payload, timeout=self.timeout
                )
@@ -80,7 +104,7 @@ class UploadAgenticTraces:
                )
                return presignedurl
            elif response.status_code == 401:
-                logger.warning("Received 401 error. Attempting to refresh token.")
+                logger.warning("Received 401 error while getting presign url. Attempting to refresh token.")
                token = RagaAICatalyst.get_token(force_refresh=True)
                headers = {
                    "Content-Type": "application/json",
@@ -106,7 +130,7 @@ class UploadAgenticTraces:
                    return presignedurl
                else:
                    logger.error(
-                        f"Error while getting presigned url: {response.json()['message']}"
+                        f"Error while getting presigned url after token refresh: {response.json()['message']}"
                    )
                    return None
            else:
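Both the POST path and the GET fallback now share the same refresh-once-on-401 shape. A minimal sketch of the pattern, assuming a get_token(force_refresh=...) callable like RagaAICatalyst.get_token and any requests-compatible session:

    def post_with_token_refresh(session, url, payload, get_token, timeout=120):
        headers = {"Authorization": f"Bearer {get_token(force_refresh=False)}"}
        response = session.post(url, json=payload, headers=headers, timeout=timeout)
        if response.status_code != 401:
            return response
        # Retry exactly once with a freshly minted token; a second 401 is
        # returned to the caller for logging, mirroring the hunks above.
        headers = {"Authorization": f"Bearer {get_token(force_refresh=True)}"}
        return session.post(url, json=payload, headers=headers, timeout=timeout)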
@@ -8,8 +8,6 @@ from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
 from requests.exceptions import ConnectionError, Timeout, RequestException
 from http.client import RemoteDisconnected
 
-import requests
-
 from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
 from .session_manager import session_manager
 
@@ -106,7 +104,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None, timeou
        session_manager.handle_request_exceptions(e, "fetching dataset code hashes")
        return None
    except RequestException as e:
-        logger.error(f"Failed to list datasets: {e}")
+        logger.error(f"Failed to fetch dataset code hashes: {e}")
        return None
 
 
@@ -142,7 +140,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
        start_time = time.time()
        # Changed to POST from GET
        endpoint = f"{url_base}/v1/llm/presigned-url"
-        response = requests.request(
+        response = session_manager.make_request_with_retry(
            "POST", endpoint, headers=headers, data=payload, timeout=timeout
        )
        elapsed_ms = (time.time() - start_time) * 1000
@@ -150,20 +148,20 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
            f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
        )
 
-        if response.status_code == 200:
+        if response.status_code in [200, 201]:
            presigned_url = response.json()["data"]["presignedUrls"][0]
            presigned_url = update_presigned_url(presigned_url, url_base)
            return presigned_url
        else:
            # If POST fails, try GET
-            response = requests.request(
+            response = session_manager.make_request_with_retry(
                "POST", endpoint, headers=headers, data=payload, timeout=timeout
            )
            elapsed_ms = (time.time() - start_time) * 1000
            logger.debug(
                f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
            )
-            if response.status_code == 200:
+            if response.status_code in [200, 201]:
                presigned_url = response.json()["data"]["presignedUrls"][0]
                presigned_url = update_presigned_url(presigned_url, url_base)
                return presigned_url
@@ -175,7 +173,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
                    "Content-Type": "application/json",
                    "X-Project-Name": project_name,
                }
-                response = requests.request(
+                response = session_manager.make_request_with_retry(
                    "POST", endpoint, headers=headers, data=payload, timeout=timeout
                )
                elapsed_ms = (time.time() - start_time) * 1000
@@ -188,15 +186,18 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
                return presigned_url
            else:
                logger.error(
-                    f"Failed to fetch code hashes: {response.json()['message']}"
+                    f"Failed to fetch presigned URL for code upload after 401: {response.json()['message']}"
                )
        else:
            logger.error(
-                f"Failed to fetch code hashes: {response.json()['message']}"
+                f"Failed to fetch presigned URL for code upload: {response.json()['message']}"
            )
            return None
-    except requests.exceptions.RequestException as e:
-        logger.error(f"Failed to list datasets: {e}")
+    except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout, RemoteDisconnected) as e:
+        session_manager.handle_request_exceptions(e, "fetching presigned URL for code upload")
+        return None
+    except RequestException as e:
+        logger.error(f"Failed to fetch presigned URL for code upload: {e}")
        return None
 
 
@@ -1,43 +1,100 @@
 import os
 import json
-import re
 import requests
+import logging
+import time
+from typing import Optional
+from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
+from requests.exceptions import ConnectionError, Timeout, RequestException
+from http.client import RemoteDisconnected
+
 from ragaai_catalyst import RagaAICatalyst
+from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
+
+IGNORED_KEYS = {"log_source", "recorded_on"}
+logger = logging.getLogger(__name__)
+
+def create_dataset_schema_with_trace(
+        project_name: str,
+        dataset_name: str,
+        base_url: Optional[str] = None,
+        user_details: Optional[dict] = None,
+        timeout: int = 120) -> requests.Response:
+    schema_mapping = {}
 
-def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, user_details=None, timeout=120):
-    SCHEMA_MAPPING = {}
-    metadata = user_details.get("trace_user_detail").get("metadata")
-    if metadata and isinstance(metadata, dict):
+    metadata = (
+        user_details.get("trace_user_detail", {}).get("metadata", {})
+        if user_details else {}
+    )
+    if isinstance(metadata, dict):
        for key, value in metadata.items():
-            if key in ["log_source", "recorded_on"]:
+            if key in IGNORED_KEYS:
                continue
-            SCHEMA_MAPPING[key] = {"columnType": "metadata"}
-
-    def make_request():
-        headers = {
-            "Content-Type": "application/json",
-            "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
-            "X-Project-Name": project_name,
-        }
-        if SCHEMA_MAPPING:
-            payload = json.dumps({
-                "datasetName": dataset_name,
-                "traceFolderUrl": None,
-                "schemaMapping": SCHEMA_MAPPING
-            })
-        else:
-            payload = json.dumps({
-                "datasetName": dataset_name,
-                "traceFolderUrl": None,
-            })
+            schema_mapping[key] = {"columnType": "metadata"}
+
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
+        "X-Project-Name": project_name,
+    }
+
+    if schema_mapping:
+        payload = json.dumps({
+            "datasetName": dataset_name,
+            "traceFolderUrl": None,
+            "schemaMapping": schema_mapping
+        })
+    else:
+        payload = json.dumps({
+            "datasetName": dataset_name,
+            "traceFolderUrl": None,
+        })
+
+    try:
        # Use provided base_url or fall back to default
        url_base = base_url if base_url is not None else RagaAICatalyst.BASE_URL
-        response = requests.request("POST",
-            f"{url_base}/v1/llm/dataset/logs",
-            headers=headers,
-            data=payload,
-            timeout=timeout
+        start_time = time.time()
+        endpoint = f"{url_base}/v1/llm/dataset/logs"
+
+        response = session_manager.make_request_with_retry(
+            "POST", endpoint, headers=headers, data=payload, timeout=timeout
+        )
+
+        elapsed_ms = (time.time() - start_time) * 1000
+        logger.debug(
+            f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
        )
-        return response
-    response = make_request()
-    return response
+
+        if response.status_code in [200, 201]:
+            logger.info(f"Dataset schema created successfully: {response.status_code}")
+            return response
+        elif response.status_code == 401:
+            logger.warning("Received 401 error during dataset schema creation. Attempting to refresh token.")
+            RagaAICatalyst.get_token(force_refresh=True)
+            headers = {
+                "Content-Type": "application/json",
+                "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
+                "X-Project-Name": project_name,
+            }
+            response = session_manager.make_request_with_retry(
+                "POST", endpoint, headers=headers, data=payload, timeout=timeout
+            )
+            elapsed_ms = (time.time() - start_time) * 1000
+            logger.debug(
+                f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
+            )
+            if response.status_code in [200, 201]:
+                logger.info(f"Dataset schema created successfully after 401: {response.status_code}")
+                return response
+            else:
+                logger.error(f"Failed to create dataset schema after 401: {response.status_code}")
+                return None
+        else:
+            logger.error(f"Failed to create dataset schema: {response.status_code}")
+            return None
+    except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout, RemoteDisconnected) as e:
+        session_manager.handle_request_exceptions(e, "creating dataset schema")
+        return None
+    except RequestException as e:
+        logger.error(f"Failed to create dataset schema: {e}")
+        return None
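After this rewrite, create_dataset_schema_with_trace returns the requests.Response on 200/201 and None on every failure path, so callers must branch on both. A hedged usage sketch (project and dataset names are placeholders):

    from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import (
        create_dataset_schema_with_trace,
    )

    response = create_dataset_schema_with_trace(
        project_name="demo-project",
        dataset_name="demo-dataset",
        user_details={"trace_user_detail": {"metadata": {"env": "staging"}}},
        timeout=120,
    )
    if response is None:
        print("schema creation failed; see logs")
    elif response.status_code in (200, 201):
        print("schema created:", response.status_code)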
@@ -7,6 +7,8 @@ import re
 import ast
 import importlib.util
 import json
+from typing import List, Optional, Tuple
+
 import ipynbname
 from copy import deepcopy
 
@@ -460,8 +462,14 @@ class TraceDependencyTracker:
        logger.debug(f"Zip file created successfully at: {zip_filename}")
        return hash_id, zip_filename
 
-def zip_list_of_unique_files(filepaths, output_dir=None):
+def zip_list_of_unique_files(
+        filepaths: List[str],
+        output_dir: Optional[str] = None
+) -> Tuple[str, str]:
    """Create a zip file containing all unique files and their dependencies."""
+    if not filepaths:
+        logger.warning("The filepaths list is empty. Proceeding with an empty ZIP archive.")
+        filepaths = []
    if output_dir is None:
        # Set default output directory based on environment
        if JupyterNotebookHandler.is_running_in_colab():
@@ -4,17 +4,34 @@ Dynamic Trace Exporter - A wrapper for RAGATraceExporter that allows dynamic upd
 import logging
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 from ragaai_catalyst.tracers.exporters.ragaai_trace_exporter import RAGATraceExporter
+from typing import Optional, List, Dict, Callable
 
 logger = logging.getLogger("RagaAICatalyst")
 
+
 class DynamicTraceExporter(SpanExporter):
    """
    A wrapper around RAGATraceExporter that allows dynamic updates to properties.
    This exporter forwards all calls to the underlying RAGATraceExporter but allows
    certain properties to be updated dynamically during execution.
    """
-
-    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
+    def __init__(
+            self,
+            project_name: str,
+            dataset_name: str,
+            base_url: str,
+            tracer_type: str,
+            files_to_zip: Optional[List[str]] = None,
+            project_id: Optional[str] = None,
+            user_details: Optional[Dict] = None,
+            custom_model_cost: Optional[dict] = None,
+            timeout: int = 120,
+            post_processor: Optional[Callable] = None,
+            max_upload_workers: int = 30,
+            user_context: Optional[str] = None,
+            user_gt: Optional[str] = None,
+            external_id: Optional[str] = None
+    ):
        """
        Initialize the DynamicTraceExporter.
 
@@ -30,22 +47,22 @@ class DynamicTraceExporter(SpanExporter):
            max_upload_workers: Maximum number of upload workers
        """
        self._exporter = RAGATraceExporter(
+            project_name=project_name,
+            dataset_name=dataset_name,
+            base_url=base_url,
            tracer_type=tracer_type,
            files_to_zip=files_to_zip,
-            project_name=project_name,
            project_id=project_id,
-            dataset_name=dataset_name,
            user_details=user_details,
-            base_url=base_url,
            custom_model_cost=custom_model_cost,
            timeout=timeout,
-            post_processor= post_processor,
-            max_upload_workers = max_upload_workers,
-            user_context = user_context,
-            user_gt = user_gt,
+            post_processor=post_processor,
+            max_upload_workers=max_upload_workers,
+            user_context=user_context,
+            user_gt=user_gt,
            external_id=external_id
        )
-
+
        # Store the initial values
        self._files_to_zip = files_to_zip
        self._project_name = project_name
@@ -60,7 +77,6 @@ class DynamicTraceExporter(SpanExporter):
        self._user_gt = user_gt
        self._external_id = external_id
 
-
    def export(self, spans):
        """
        Export spans by forwarding to the underlying exporter.
@@ -84,8 +100,6 @@
            return result
        except Exception as e:
            logger.error(f"Error exporting trace: {e}")
-
-
 
    def shutdown(self):
        """
@@ -103,7 +117,7 @@
            return self._exporter.shutdown()
        except Exception as e:
            logger.error(f"Error shutting down exporter: {e}")
-
+
    def _update_exporter_properties(self):
        """
        Update the underlying exporter's properties with the current values.
@@ -118,55 +132,55 @@
        self._exporter.post_processor = self._post_processor
        self._exporter.max_upload_workers = self._max_upload_workers
        self._exporter.user_context = self._user_context
-        self._exporter.user_gt = self._user_gt 
+        self._exporter.user_gt = self._user_gt
        self._exporter.external_id = self._external_id
-
+
    # Getter and setter methods for dynamic properties
-
+
    @property
    def files_to_zip(self):
        return self._files_to_zip
-
+
    @files_to_zip.setter
    def files_to_zip(self, value):
        self._files_to_zip = value
-
+
    @property
    def project_name(self):
        return self._project_name
-
+
    @project_name.setter
    def project_name(self, value):
        self._project_name = value
-
+
    @property
    def project_id(self):
        return self._project_id
-
+
    @project_id.setter
    def project_id(self, value):
        self._project_id = value
-
+
    @property
    def dataset_name(self):
        return self._dataset_name
-
+
    @dataset_name.setter
    def dataset_name(self, value):
        self._dataset_name = value
-
+
    @property
    def user_details(self):
        return self._user_details
-
+
    @user_details.setter
    def user_details(self, value):
        self._user_details = value
-
+
    @property
    def base_url(self):
        return self._base_url
-
+
    @base_url.setter
    def base_url(self, value):
        self._base_url = value
@@ -174,15 +188,15 @@
    @property
    def custom_model_cost(self):
        return self._custom_model_cost
-
+
    @custom_model_cost.setter
    def custom_model_cost(self, value):
        self._custom_model_cost = value
-
+
    @property
    def max_upload_workers(self):
        return self._max_upload_workers
-
+
    @max_upload_workers.setter
    def max_upload_workers(self, value):
        self._max_upload_workers = value
@@ -190,7 +204,7 @@
    @property
    def user_context(self):
        return self._user_context
-
+
    @user_context.setter
    def user_context(self, value):
        self._user_context = value
@@ -198,7 +212,7 @@
    @property
    def user_gt(self):
        return self._user_gt
-
+
    @user_gt.setter
    def user_gt(self, value):
        self._user_gt = value
@@ -206,8 +220,7 @@
    @property
    def external_id(self):
        return self._external_id
-
+
    @external_id.setter
    def external_id(self, value):
        self._external_id = value
-
@@ -3,6 +3,7 @@ import logging
 import os
 import tempfile
 from dataclasses import asdict
+from typing import Optional, Callable, Dict, List
 
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 
@@ -23,6 +24,7 @@ logging_level = (
    logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
 )
 
+
 class TracerJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime):
@@ -47,8 +49,25 @@ class TracerJSONEncoder(json.JSONEncoder):
        except:
            return None  # Last resort: return None instead of failing
 
+
 class RAGATraceExporter(SpanExporter):
-    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
+    def __init__(
+            self,
+            project_name: str,
+            dataset_name: str,
+            base_url: str,
+            tracer_type: str,
+            files_to_zip: Optional[List[str]] = None,
+            project_id: Optional[str] = None,
+            user_details: Optional[Dict] = None,
+            custom_model_cost: Optional[dict] = None,
+            timeout: int = 120,
+            post_processor: Optional[Callable] = None,
+            max_upload_workers: int = 30,
+            user_context: Optional[str] = None,
+            user_gt: Optional[str] = None,
+            external_id: Optional[str] = None
+    ):
        self.trace_spans = dict()
        self.tmp_dir = tempfile.gettempdir()
        self.tracer_type = tracer_type
@@ -77,7 +96,7 @@ class RAGATraceExporter(SpanExporter):
 
            if trace_id not in self.trace_spans:
                self.trace_spans[trace_id] = list()
-
+
            if span_json.get("attributes").get("openinference.span.kind", None) is None:
                span_json["attributes"]["openinference.span.kind"] = "UNKNOWN"
 
@@ -118,10 +137,10 @@ class RAGATraceExporter(SpanExporter):
            if ragaai_trace_details is None:
                logger.error(f"Cannot upload trace {trace_id}: conversion failed and returned None")
                return  # Exit early if conversion failed
-
+
            # Upload the trace if upload_trace function is provided
            try:
-                if self.post_processor!=None:
+                if self.post_processor != None:
                    ragaai_trace_details['trace_file_path'] = self.post_processor(ragaai_trace_details['trace_file_path'])
                self.upload_trace(ragaai_trace_details, trace_id)
            except Exception as e:
@@ -130,13 +149,14 @@ class RAGATraceExporter(SpanExporter):
    def prepare_trace(self, spans, trace_id):
        try:
            try:
-                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,self.external_id)
+                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,
+                                                   self.external_id)
            except Exception as e:
                print(f"Error in convert_json_format function: {trace_id}: {e}")
                return None
-
+
            try:
-                interactions = format_interactions(ragaai_trace) 
+                interactions = format_interactions(ragaai_trace)
                ragaai_trace["workflow"] = interactions['workflow']
            except Exception as e:
                print(f"Error in format_interactions function: {trace_id}: {e}")
@@ -183,18 +203,26 @@ class RAGATraceExporter(SpanExporter):
            except Exception as e:
                print(f"Error in adding tracer type: {trace_id}: {e}")
                return None
-
-            #Add user passed metadata to the trace
+
+            # Add user passed metadata to the trace
            try:
-                if self.user_details.get("trace_user_detail").get("metadata") and isinstance(self.user_details.get("trace_user_detail").get("metadata"), dict):
-                    for key, value in self.user_details.get("trace_user_detail").get("metadata").items():
-                        if key in ["log_source", "recorded_on"]:
-                            continue
-                        ragaai_trace["metadata"][key] = value
+                logger.debug("Started adding user passed metadata")
+
+                metadata = (
+                    self.user_details.get("trace_user_detail", {}).get("metadata", {})
+                    if self.user_details else {}
+                )
+
+                if isinstance(metadata, dict):
+                    for key, value in metadata.items():
+                        if key not in {"log_source", "recorded_on"}:
+                            ragaai_trace.setdefault("metadata", {})[key] = value
+
+                logger.debug("Completed adding user passed metadata")
            except Exception as e:
                print(f"Error in adding metadata: {trace_id}: {e}")
                return None
-
+
            try:
                # Save the trace_json
                trace_file_path = os.path.join(self.tmp_dir, f"{trace_id}.json")
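The metadata merge now initializes ragaai_trace["metadata"] lazily via setdefault and filters the reserved keys with a set, instead of repeatedly re-fetching user_details. A compact standalone sketch of the same logic:

    RESERVED = {"log_source", "recorded_on"}

    def merge_user_metadata(trace, user_details):
        # user_details may be None or partially populated; fetch defensively.
        metadata = (user_details or {}).get("trace_user_detail", {}).get("metadata", {})
        if isinstance(metadata, dict):
            for key, value in metadata.items():
                if key not in RESERVED:
                    trace.setdefault("metadata", {})[key] = value
        return trace

    print(merge_user_metadata({}, {"trace_user_detail": {"metadata": {"env": "prod", "log_source": "x"}}}))
    # {'metadata': {'env': 'prod'}}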
@@ -220,16 +248,16 @@
            hash_id = ragaai_trace_details['hash_id']
            zip_path = ragaai_trace_details['code_zip_path']
            self.upload_task_id = submit_upload_task(
-                filepath=filepath,
-                hash_id=hash_id,
-                zip_path=zip_path,
-                project_name=self.project_name,
-                project_id=self.project_id,
-                dataset_name=self.dataset_name,
-                user_details=self.user_details,
-                base_url=self.base_url,
-                tracer_type=self.tracer_type,
-                timeout=self.timeout
-            )
-
-            logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
+                filepath=filepath,
+                hash_id=hash_id,
+                zip_path=zip_path,
+                project_name=self.project_name,
+                project_id=self.project_id,
+                dataset_name=self.dataset_name,
+                user_details=self.user_details,
+                base_url=self.base_url,
+                tracer_type=self.tracer_type,
+                timeout=self.timeout
+            )
+
+            logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
@@ -170,7 +170,7 @@ class Tracer(AgenticTracing):
            logger.error(f"Failed to retrieve projects list: {e}")
 
        # Handle agentic tracers
-        if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain" or tracer_type == "google-adk":
+        if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain" or tracer_type == "llamaindex" or tracer_type == "google-adk":
            # Setup instrumentors based on tracer type
            instrumentors = []
 
@@ -600,13 +600,13 @@
 
        # Create a dynamic exporter that allows property updates
        self.dynamic_exporter = DynamicTraceExporter(
+            project_name=self.project_name,
+            dataset_name=self.dataset_name,
+            base_url=self.base_url,
            tracer_type=self.tracer_type,
            files_to_zip=list_of_unique_files,
-            project_name=self.project_name,
            project_id=self.project_id,
-            dataset_name=self.dataset_name,
            user_details=self.user_details,
-            base_url=self.base_url,
            custom_model_cost=self.model_custom_cost,
            timeout = self.timeout,
            post_processor= self.post_processor,
@@ -188,7 +188,13 @@ def convert_json_format(
    ]
    model_name = next((name for name in reversed(model_names) if name), "")
    if not model_name and span["attributes"].get("openinference.span.kind")=="LLM":
-        model_name = json.loads(span["attributes"].get("metadata", "")).get("ls_model_name", "")
+        try:
+            metadata = span["attributes"].get("metadata") or span["attributes"].get("aiq.metadata")
+            metadata = json.loads(metadata)
+            model_name = metadata.get("ls_model_name", "")
+        except Exception as e:
+            model_name = ""
+            logger.error(f"Failed to parse metadata: {e}", exc_info=True)
    if model_name and span["attributes"].get("openinference.span.kind") == "LLM":
        try:
            model_costs = get_model_cost()
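The converter now looks for LangSmith-style metadata under either the metadata or the aiq.metadata attribute, and treats missing or unparseable JSON as "no model name" rather than raising. A standalone sketch of that defensive lookup (the example model name is illustrative):

    import json
    import logging

    logger = logging.getLogger(__name__)

    def extract_model_name(attributes):
        """Best-effort ls_model_name lookup; empty string if absent or malformed."""
        raw = attributes.get("metadata") or attributes.get("aiq.metadata")
        try:
            return json.loads(raw).get("ls_model_name", "")
        except Exception as e:  # raw may be None or invalid JSON
            logger.error(f"Failed to parse metadata: {e}", exc_info=True)
            return ""

    print(extract_model_name({"metadata": json.dumps({"ls_model_name": "gpt-4o"})}))  # gpt-4o
    print(extract_model_name({}))  # "" (error logged, not raised)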
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.2.5b4
+Version: 2.2.5b6
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
 Requires-Python: <=3.13.2,>=3.10
@@ -27,7 +27,7 @@ ragaai_catalyst/redteaming/utils/issue_description.py,sha256=iB0XbeOjdqHTPrikCKS
 ragaai_catalyst/redteaming/utils/rt.png,sha256=HzVC8bz_4UgwafKXuMe8RJVI6CyK_UmSgo53ceAOQK8,282154
 ragaai_catalyst/tracers/__init__.py,sha256=LfgTes-nHpazssbGKnn8kyLZNr49kIPrlkrqqoTFTfc,301
 ragaai_catalyst/tracers/distributed.py,sha256=CGPuOh4CsgEk428PPibieLaAG2Tt3BVygF6ZlmbXxg4,10009
-ragaai_catalyst/tracers/tracer.py,sha256=Ig9e6Je2tFFXWsDXxIijcZr_Hm2AO8ugoF_tup5XQLc,33618
+ragaai_catalyst/tracers/tracer.py,sha256=uLSrN4HZT19YmEG5dTPx_aauuNU8UfflNTn3xjshfmI,33649
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -36,30 +36,30 @@ ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-
 ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=Wq4LFclPlLy47LyXvbaLeYiSMQABj7VYS3J87xyea_E,4159
 ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py,sha256=sOlxeIYIP8tycaTtZC9xkZosi6EDJUxvDw0_rc_NLI8,6823
-ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=Ujbu0KDl7oDr-cFtLwrQK_i7ghMuPV92mFnRfobJ1aI,24822
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=0u4GWgqtaBz9cnr_KuqVIWDvhHWkgTAOTtiy0w8RPuk,13017
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=CbTx2vBAPIat5bdIClv9szOo4i33YL_1v04mkUjNG2c,12170
+ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=3VZjuW3OnElYE-VjhhT9ILvFSG8Rh0sH9n2M_Wds_TU,25146
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=yLzYiyNk_XtwwlUFbq5uaRahDKKF-eFBoI6xqHUNedw,14526
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=IAhNFS-nbV_ImNz8Xp98qU4r-2naj49qg9q08x53TFE,12521
 ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=UCecTolWXp6DwOaRut0EZo8q2e1kAeT_Bv4VeoRSVK8,1506
+ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=tojXeuTigDoLCdPvBKRBWtTHwjtm2tFjTizM7NknSo4,3887
 ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py,sha256=YG601l1a29ov9VPu9Vl4RXxgL7l16k54_WWnoTNoG58,2064
 ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=PiyXvEj_qu0EnJFjk4GfGyWFZbwlvQQh0hdQ_lm0p8E,22976
 ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=2tzGw_cKCTPcfjEm7iGvFE6pTw7gMTPzeBov_MTaXNY,321336
 ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py,sha256=H8WNsk4v_5T6OUw4TFOzlDLjQhJwjh1nAMyMAoqMEi4,6946
 ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=W7Nw-IpugejIoHbCtQiN4Sn4ughLocQ9AUCjuAtOhOo,17258
 ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=G027toV-Km20JjKrc-Y_PilQ8ABEKrBvvzgLTnqVg7I,5819
-ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=4TeCGsFF26249fV6dJHLTZDrRa93SG9oer4rudoF8Y4,19443
+ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=Q3KXaqpvCPl7CL4bzwKvE6xhY5JNvfxar6QUXXXz3w0,19688
 ragaai_catalyst/tracers/exporters/__init__.py,sha256=qA3vx7z9CQ5kTGCn9LIDtIFvW9fJHQLkvF9-xBQUm94,237
-ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=Rm-QaLv1qMAKpHKcFOcK_HWaKHwFBoUH45_4QYipE-g,6843
+ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=cQvBGFOgSNLoyPIvkTrHottlpmg1vcgFgej-J2PwGtY,7149
 ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=_icciSCktK6c86KB2HV3GZMFHvUitgKJ8x_IdPmgi1M,6363
-ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=M3Bj311o_MpjY1gO9SGpfIIT2O0yz76uCgw6mKUwHdo,10032
+ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=aUooYU7SlT29pYK_p9WO5udlFO-yOEZNeR3RSB-DOBo,10549
 ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
 ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256=WlZCZeOQ54aMVjYS8BAeka2uaFC3ftBTMZ8zzzA8TAI,495947
 ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py,sha256=3ygkRT__lLDRflRttjzPu28tIA8cTCiGQVMQjqMItqQ,11309
-ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=-HZVmijeUFLO7e9OAvi1RJdWVTxPRUHPd1MkKQlCD54,11785
+ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=NPsxU04u6MCOMqisrgiAIv1bXFjWNwlrUn-LScC8f-s,12109
 ragaai_catalyst/tracers/utils/utils.py,sha256=o-p9n2ZuophdrV0wrixu-BqRHCkovup_klc3mS8mU8g,2374
-ragaai_catalyst-2.2.5b4.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-ragaai_catalyst-2.2.5b4.dist-info/METADATA,sha256=5mMtdrTjYKckmte2qOrBZso2ldbMSQFcvE4O6hhLfeY,17735
-ragaai_catalyst-2.2.5b4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ragaai_catalyst-2.2.5b4.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
-ragaai_catalyst-2.2.5b4.dist-info/RECORD,,
+ragaai_catalyst-2.2.5b6.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.2.5b6.dist-info/METADATA,sha256=kHQKCIyv8lTxRSQ7OCFbG_5WSUF83HcVyOmDVcVMCyg,17735
+ragaai_catalyst-2.2.5b6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ragaai_catalyst-2.2.5b6.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.2.5b6.dist-info/RECORD,,