ragaai-catalyst 2.2.5b5__py3-none-any.whl → 2.2.5b6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -481,7 +481,8 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
 
         return task_id
     except RuntimeError as e:
-        if "cannot schedule new futures after shutdown" in str(e):
+        if any(msg in str(e) for msg in
+               ("cannot schedule new futures after shutdown", "cannot schedule new futures after interpreter shutdown")):
            logger.warning(f"Executor already shut down, falling back to synchronous processing: {e}")
            return do_sync_processing()
        else:
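The RuntimeError fallback above now matches both CPython shutdown messages. A minimal standalone sketch of that logic, assuming a hypothetical do_sync_processing() stand-in for the uploader's synchronous path:

    import logging

    logger = logging.getLogger(__name__)

    SHUTDOWN_MESSAGES = (
        "cannot schedule new futures after shutdown",
        "cannot schedule new futures after interpreter shutdown",
    )

    def do_sync_processing():
        # Hypothetical stand-in for the uploader's synchronous fallback.
        return "sync-task-id"

    def submit(executor_submit):
        try:
            return executor_submit()
        except RuntimeError as e:
            # Either shutdown message now triggers the synchronous fallback.
            if any(msg in str(e) for msg in SHUTDOWN_MESSAGES):
                logger.warning(f"Executor already shut down, falling back to synchronous processing: {e}")
                return do_sync_processing()
            raise

    def failing_submit():
        raise RuntimeError("cannot schedule new futures after interpreter shutdown")

    print(submit(failing_submit))  # -> sync-task-id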
@@ -1,8 +1,9 @@
 import os
 import json
-import re
+import requests
 import logging
 import time
+from typing import Optional
 from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
 from requests.exceptions import ConnectionError, Timeout, RequestException
 from http.client import RemoteDisconnected
@@ -10,16 +11,26 @@ from http.client import RemoteDisconnected
 from ragaai_catalyst import RagaAICatalyst
 from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
 
+IGNORED_KEYS = {"log_source", "recorded_on"}
 logger = logging.getLogger(__name__)
 
-def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, user_details=None, timeout=120):
-    SCHEMA_MAPPING = {}
-    metadata = user_details.get("trace_user_detail").get("metadata")
-    if metadata and isinstance(metadata, dict):
+def create_dataset_schema_with_trace(
+        project_name: str,
+        dataset_name: str,
+        base_url: Optional[str] = None,
+        user_details: Optional[dict] = None,
+        timeout: int = 120) -> requests.Response:
+    schema_mapping = {}
+
+    metadata = (
+        user_details.get("trace_user_detail", {}).get("metadata", {})
+        if user_details else {}
+    )
+    if isinstance(metadata, dict):
         for key, value in metadata.items():
-            if key in ["log_source", "recorded_on"]:
+            if key in IGNORED_KEYS:
                 continue
-            SCHEMA_MAPPING[key] = {"columnType": "metadata"}
+            schema_mapping[key] = {"columnType": "metadata"}
 
     headers = {
         "Content-Type": "application/json",
@@ -27,11 +38,11 @@ def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None,
         "X-Project-Name": project_name,
     }
 
-    if SCHEMA_MAPPING:
+    if schema_mapping:
         payload = json.dumps({
             "datasetName": dataset_name,
             "traceFolderUrl": None,
-            "schemaMapping": SCHEMA_MAPPING
+            "schemaMapping": schema_mapping
         })
     else:
         payload = json.dumps({
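For illustration, the schema-mapping construction above can be exercised on its own; a minimal sketch with a hypothetical user_details payload (build_schema_mapping is a throwaway name, not a helper from the package):

    from typing import Optional

    IGNORED_KEYS = {"log_source", "recorded_on"}

    def build_schema_mapping(user_details: Optional[dict]) -> dict:
        # Safely drill into user_details; missing keys yield an empty mapping.
        metadata = (
            user_details.get("trace_user_detail", {}).get("metadata", {})
            if user_details else {}
        )
        schema_mapping = {}
        if isinstance(metadata, dict):
            for key in metadata:
                if key in IGNORED_KEYS:
                    continue
                schema_mapping[key] = {"columnType": "metadata"}
        return schema_mapping

    # Hypothetical payload: only "environment" survives the ignore list.
    print(build_schema_mapping(
        {"trace_user_detail": {"metadata": {"environment": "staging", "log_source": "sdk"}}}
    ))
    # {'environment': {'columnType': 'metadata'}}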
@@ -7,6 +7,8 @@ import re
 import ast
 import importlib.util
 import json
+from typing import List, Optional, Tuple
+
 import ipynbname
 from copy import deepcopy
 
@@ -460,8 +462,14 @@ class TraceDependencyTracker:
         logger.debug(f"Zip file created successfully at: {zip_filename}")
         return hash_id, zip_filename
 
-def zip_list_of_unique_files(filepaths, output_dir=None):
+def zip_list_of_unique_files(
+        filepaths: List[str],
+        output_dir: Optional[str] = None
+) -> Tuple[str, str]:
     """Create a zip file containing all unique files and their dependencies."""
+    if not filepaths:
+        logger.warning("The filepaths list is empty. Proceeding with an empty ZIP archive.")
+        filepaths = []
     if output_dir is None:
         # Set default output directory based on environment
         if JupyterNotebookHandler.is_running_in_colab():
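The new empty-input guard keeps the zip step from failing when no files were collected. A simplified sketch of that behaviour, assuming a throwaway zip_files() helper and a placeholder hash rather than the package's real content hashing:

    import logging
    import os
    import tempfile
    import zipfile
    from typing import List, Optional, Tuple

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger(__name__)

    def zip_files(filepaths: Optional[List[str]], output_dir: Optional[str] = None) -> Tuple[str, str]:
        # Mirror of the guard above: warn and continue with an empty archive.
        if not filepaths:
            logger.warning("The filepaths list is empty. Proceeding with an empty ZIP archive.")
            filepaths = []
        output_dir = output_dir or tempfile.gettempdir()
        zip_path = os.path.join(output_dir, "code.zip")
        with zipfile.ZipFile(zip_path, "w") as zf:
            for path in filepaths:
                zf.write(path, arcname=os.path.basename(path))
        return "hash-placeholder", zip_path  # placeholder hash for illustration only

    print(zip_files(None))  # warns, then returns a path to an empty archive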
@@ -4,17 +4,34 @@ Dynamic Trace Exporter - A wrapper for RAGATraceExporter that allows dynamic upd
 import logging
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 from ragaai_catalyst.tracers.exporters.ragaai_trace_exporter import RAGATraceExporter
+from typing import Optional, List, Dict, Callable
 
 logger = logging.getLogger("RagaAICatalyst")
 
+
 class DynamicTraceExporter(SpanExporter):
     """
     A wrapper around RAGATraceExporter that allows dynamic updates to properties.
     This exporter forwards all calls to the underlying RAGATraceExporter but allows
     certain properties to be updated dynamically during execution.
     """
-
-    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
+    def __init__(
+            self,
+            project_name: str,
+            dataset_name: str,
+            base_url: str,
+            tracer_type: str,
+            files_to_zip: Optional[List[str]] = None,
+            project_id: Optional[str] = None,
+            user_details: Optional[Dict] = None,
+            custom_model_cost: Optional[dict] = None,
+            timeout: int = 120,
+            post_processor: Optional[Callable] = None,
+            max_upload_workers: int = 30,
+            user_context: Optional[str] = None,
+            user_gt: Optional[str] = None,
+            external_id: Optional[str] = None
+    ):
         """
         Initialize the DynamicTraceExporter.
 
@@ -30,22 +47,22 @@
             max_upload_workers: Maximum number of upload workers
         """
         self._exporter = RAGATraceExporter(
+            project_name=project_name,
+            dataset_name=dataset_name,
+            base_url=base_url,
             tracer_type=tracer_type,
             files_to_zip=files_to_zip,
-            project_name=project_name,
             project_id=project_id,
-            dataset_name=dataset_name,
             user_details=user_details,
-            base_url=base_url,
             custom_model_cost=custom_model_cost,
             timeout=timeout,
-            post_processor= post_processor,
-            max_upload_workers = max_upload_workers,
-            user_context = user_context,
-            user_gt = user_gt,
+            post_processor=post_processor,
+            max_upload_workers=max_upload_workers,
+            user_context=user_context,
+            user_gt=user_gt,
             external_id=external_id
         )
-
+
         # Store the initial values
         self._files_to_zip = files_to_zip
         self._project_name = project_name
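Because project_name, dataset_name and base_url now come first in both constructors, callers that passed arguments positionally would mis-bind them; passing keywords, as the wrapper itself does above, is order-independent. A minimal sketch with placeholder values (the project, dataset and URL are illustrative, not real endpoints):

    from ragaai_catalyst.tracers.exporters.dynamic_trace_exporter import DynamicTraceExporter

    # Placeholder values; a real setup would use a project and dataset that
    # exist in Catalyst and the actual Catalyst API base URL.
    exporter = DynamicTraceExporter(
        project_name="my-project",
        dataset_name="my-dataset",
        base_url="https://catalyst.example.com/api",
        tracer_type="agentic",
        timeout=120,
    )

    # Properties remain dynamically updatable after construction.
    exporter.dataset_name = "another-dataset"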
@@ -60,7 +77,6 @@ class DynamicTraceExporter(SpanExporter):
         self._user_gt = user_gt
         self._external_id = external_id
 
-
     def export(self, spans):
         """
         Export spans by forwarding to the underlying exporter.
@@ -84,8 +100,6 @@ class DynamicTraceExporter(SpanExporter):
             return result
         except Exception as e:
             logger.error(f"Error exporting trace: {e}")
-
-
 
     def shutdown(self):
         """
@@ -103,7 +117,7 @@ class DynamicTraceExporter(SpanExporter):
             return self._exporter.shutdown()
         except Exception as e:
             logger.error(f"Error shutting down exporter: {e}")
-
+
     def _update_exporter_properties(self):
         """
         Update the underlying exporter's properties with the current values.
@@ -118,55 +132,55 @@ class DynamicTraceExporter(SpanExporter):
         self._exporter.post_processor = self._post_processor
         self._exporter.max_upload_workers = self._max_upload_workers
         self._exporter.user_context = self._user_context
-        self._exporter.user_gt = self._user_gt
+        self._exporter.user_gt = self._user_gt
         self._exporter.external_id = self._external_id
-
+
     # Getter and setter methods for dynamic properties
-
+
     @property
     def files_to_zip(self):
         return self._files_to_zip
-
+
     @files_to_zip.setter
     def files_to_zip(self, value):
         self._files_to_zip = value
-
+
     @property
     def project_name(self):
         return self._project_name
-
+
     @project_name.setter
     def project_name(self, value):
         self._project_name = value
-
+
     @property
     def project_id(self):
         return self._project_id
-
+
     @project_id.setter
     def project_id(self, value):
         self._project_id = value
-
+
     @property
     def dataset_name(self):
         return self._dataset_name
-
+
     @dataset_name.setter
     def dataset_name(self, value):
         self._dataset_name = value
-
+
     @property
     def user_details(self):
         return self._user_details
-
+
     @user_details.setter
     def user_details(self, value):
         self._user_details = value
-
+
     @property
     def base_url(self):
         return self._base_url
-
+
     @base_url.setter
     def base_url(self, value):
         self._base_url = value
@@ -174,15 +188,15 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def custom_model_cost(self):
         return self._custom_model_cost
-
+
     @custom_model_cost.setter
     def custom_model_cost(self, value):
         self._custom_model_cost = value
-
+
     @property
     def max_upload_workers(self):
         return self._max_upload_workers
-
+
     @max_upload_workers.setter
     def max_upload_workers(self, value):
         self._max_upload_workers = value
@@ -190,7 +204,7 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def user_context(self):
         return self._user_context
-
+
     @user_context.setter
     def user_context(self, value):
         self._user_context = value
@@ -198,7 +212,7 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def user_gt(self):
         return self._user_gt
-
+
     @user_gt.setter
     def user_gt(self, value):
         self._user_gt = value
@@ -206,8 +220,7 @@ class DynamicTraceExporter(SpanExporter):
     @property
     def external_id(self):
         return self._external_id
-
+
     @external_id.setter
     def external_id(self, value):
         self._external_id = value
-
@@ -3,6 +3,7 @@ import logging
 import os
 import tempfile
 from dataclasses import asdict
+from typing import Optional, Callable, Dict, List
 
 from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
 
@@ -23,6 +24,7 @@ logging_level = (
     logger.setLevel(logging.DEBUG) if os.getenv("DEBUG") == "1" else logging.INFO
 )
 
+
 class TracerJSONEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, datetime):
@@ -47,8 +49,25 @@ class TracerJSONEncoder(json.JSONEncoder):
             except:
                 return None # Last resort: return None instead of failing
 
+
 class RAGATraceExporter(SpanExporter):
-    def __init__(self, tracer_type, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120, post_processor = None, max_upload_workers = 30,user_context = None, user_gt = None, external_id=None):
+    def __init__(
+            self,
+            project_name: str,
+            dataset_name: str,
+            base_url: str,
+            tracer_type: str,
+            files_to_zip: Optional[List[str]] = None,
+            project_id: Optional[str] = None,
+            user_details: Optional[Dict] = None,
+            custom_model_cost: Optional[dict] = None,
+            timeout: int = 120,
+            post_processor: Optional[Callable] = None,
+            max_upload_workers: int = 30,
+            user_context: Optional[str] = None,
+            user_gt: Optional[str] = None,
+            external_id: Optional[str] = None
+    ):
         self.trace_spans = dict()
         self.tmp_dir = tempfile.gettempdir()
         self.tracer_type = tracer_type
@@ -77,7 +96,7 @@ class RAGATraceExporter(SpanExporter):
 
             if trace_id not in self.trace_spans:
                 self.trace_spans[trace_id] = list()
-
+
             if span_json.get("attributes").get("openinference.span.kind", None) is None:
                 span_json["attributes"]["openinference.span.kind"] = "UNKNOWN"
 
@@ -118,10 +137,10 @@ class RAGATraceExporter(SpanExporter):
         if ragaai_trace_details is None:
             logger.error(f"Cannot upload trace {trace_id}: conversion failed and returned None")
             return # Exit early if conversion failed
-
+
         # Upload the trace if upload_trace function is provided
         try:
-            if self.post_processor!=None:
+            if self.post_processor != None:
                 ragaai_trace_details['trace_file_path'] = self.post_processor(ragaai_trace_details['trace_file_path'])
             self.upload_trace(ragaai_trace_details, trace_id)
         except Exception as e:
@@ -130,13 +149,14 @@ class RAGATraceExporter(SpanExporter):
     def prepare_trace(self, spans, trace_id):
         try:
             try:
-                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,self.external_id)
+                ragaai_trace = convert_json_format(spans, self.custom_model_cost, self.user_context, self.user_gt,
+                                                   self.external_id)
             except Exception as e:
                 print(f"Error in convert_json_format function: {trace_id}: {e}")
                 return None
-
+
             try:
-                interactions = format_interactions(ragaai_trace)
+                interactions = format_interactions(ragaai_trace)
                 ragaai_trace["workflow"] = interactions['workflow']
             except Exception as e:
                 print(f"Error in format_interactions function: {trace_id}: {e}")
@@ -183,18 +203,26 @@ class RAGATraceExporter(SpanExporter):
             except Exception as e:
                 print(f"Error in adding tracer type: {trace_id}: {e}")
                 return None
-
-            #Add user passed metadata to the trace
+
+            # Add user passed metadata to the trace
             try:
-                if self.user_details.get("trace_user_detail").get("metadata") and isinstance(self.user_details.get("trace_user_detail").get("metadata"), dict):
-                    for key, value in self.user_details.get("trace_user_detail").get("metadata").items():
-                        if key in ["log_source", "recorded_on"]:
-                            continue
-                        ragaai_trace["metadata"][key] = value
+                logger.debug("Started adding user passed metadata")
+
+                metadata = (
+                    self.user_details.get("trace_user_detail", {}).get("metadata", {})
+                    if self.user_details else {}
+                )
+
+                if isinstance(metadata, dict):
+                    for key, value in metadata.items():
+                        if key not in {"log_source", "recorded_on"}:
+                            ragaai_trace.setdefault("metadata", {})[key] = value
+
+                logger.debug("Completed adding user passed metadata")
             except Exception as e:
                 print(f"Error in adding metadata: {trace_id}: {e}")
                 return None
-
+
             try:
                 # Save the trace_json
                 trace_file_path = os.path.join(self.tmp_dir, f"{trace_id}.json")
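The rewritten block merges user metadata defensively: missing user_details, a non-dict metadata value, and a trace without a metadata bucket all degrade gracefully. A small sketch of the same pattern, using a hypothetical merge_user_metadata() helper and made-up payloads:

    from typing import Optional

    IGNORED_KEYS = {"log_source", "recorded_on"}

    def merge_user_metadata(ragaai_trace: dict, user_details: Optional[dict]) -> dict:
        # Tolerate missing user_details and a trace with no "metadata" bucket yet.
        metadata = (
            user_details.get("trace_user_detail", {}).get("metadata", {})
            if user_details else {}
        )
        if isinstance(metadata, dict):
            for key, value in metadata.items():
                if key not in IGNORED_KEYS:
                    ragaai_trace.setdefault("metadata", {})[key] = value
        return ragaai_trace

    trace = {"id": "t-1"}  # hypothetical trace without a metadata bucket
    merge_user_metadata(trace, {"trace_user_detail": {"metadata": {"team": "search", "recorded_on": "2024-01-01"}}})
    print(trace)  # {'id': 't-1', 'metadata': {'team': 'search'}}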
@@ -220,16 +248,16 @@ class RAGATraceExporter(SpanExporter):
             hash_id = ragaai_trace_details['hash_id']
             zip_path = ragaai_trace_details['code_zip_path']
             self.upload_task_id = submit_upload_task(
-                filepath=filepath,
-                hash_id=hash_id,
-                zip_path=zip_path,
-                project_name=self.project_name,
-                project_id=self.project_id,
-                dataset_name=self.dataset_name,
-                user_details=self.user_details,
-                base_url=self.base_url,
-                tracer_type=self.tracer_type,
-                timeout=self.timeout
-            )
-
-            logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
+                filepath=filepath,
+                hash_id=hash_id,
+                zip_path=zip_path,
+                project_name=self.project_name,
+                project_id=self.project_id,
+                dataset_name=self.dataset_name,
+                user_details=self.user_details,
+                base_url=self.base_url,
+                tracer_type=self.tracer_type,
+                timeout=self.timeout
+            )
+
+            logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
@@ -600,13 +600,13 @@ class Tracer(AgenticTracing):
 
         # Create a dynamic exporter that allows property updates
         self.dynamic_exporter = DynamicTraceExporter(
+            project_name=self.project_name,
+            dataset_name=self.dataset_name,
+            base_url=self.base_url,
             tracer_type=self.tracer_type,
             files_to_zip=list_of_unique_files,
-            project_name=self.project_name,
             project_id=self.project_id,
-            dataset_name=self.dataset_name,
             user_details=self.user_details,
-            base_url=self.base_url,
             custom_model_cost=self.model_custom_cost,
             timeout = self.timeout,
             post_processor= self.post_processor,
@@ -188,7 +188,13 @@ def convert_json_format(
         ]
         model_name = next((name for name in reversed(model_names) if name), "")
         if not model_name and span["attributes"].get("openinference.span.kind")=="LLM":
-            model_name = json.loads(span["attributes"].get("metadata", "")).get("ls_model_name", "")
+            try:
+                metadata = span["attributes"].get("metadata") or span["attributes"].get("aiq.metadata")
+                metadata = json.loads(metadata)
+                model_name = metadata.get("ls_model_name", "")
+            except Exception as e:
+                model_name = ""
+                logger.error(f"Failed to parse metadata: {e}", exc_info=True)
         if model_name and span["attributes"].get("openinference.span.kind") == "LLM":
             try:
                 model_costs = get_model_cost()
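The hardened lookup above now also checks the "aiq.metadata" attribute and swallows malformed JSON instead of raising. A standalone sketch of that fallback, using a hypothetical extract_model_name() helper and made-up attribute payloads:

    import json
    import logging

    logger = logging.getLogger(__name__)

    def extract_model_name(attributes: dict) -> str:
        # Fall back from "metadata" to "aiq.metadata"; malformed or missing JSON
        # yields an empty model name instead of an exception.
        try:
            metadata = attributes.get("metadata") or attributes.get("aiq.metadata")
            return json.loads(metadata).get("ls_model_name", "")
        except Exception as e:
            logger.error(f"Failed to parse metadata: {e}", exc_info=True)
            return ""

    print(extract_model_name({"aiq.metadata": json.dumps({"ls_model_name": "gpt-4o-mini"})}))  # gpt-4o-mini
    print(extract_model_name({"metadata": "not-json"}))  # "" (error logged)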
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.2.5b5
+Version: 2.2.5b6
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
 Requires-Python: <=3.13.2,>=3.10
@@ -27,7 +27,7 @@ ragaai_catalyst/redteaming/utils/issue_description.py,sha256=iB0XbeOjdqHTPrikCKS
 ragaai_catalyst/redteaming/utils/rt.png,sha256=HzVC8bz_4UgwafKXuMe8RJVI6CyK_UmSgo53ceAOQK8,282154
 ragaai_catalyst/tracers/__init__.py,sha256=LfgTes-nHpazssbGKnn8kyLZNr49kIPrlkrqqoTFTfc,301
 ragaai_catalyst/tracers/distributed.py,sha256=CGPuOh4CsgEk428PPibieLaAG2Tt3BVygF6ZlmbXxg4,10009
-ragaai_catalyst/tracers/tracer.py,sha256=5jM-AJozLoc95OIR-K3K2fs0OJ2Oo47Lgtca_6ToFzk,33649
+ragaai_catalyst/tracers/tracer.py,sha256=uLSrN4HZT19YmEG5dTPx_aauuNU8UfflNTn3xjshfmI,33649
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -36,30 +36,30 @@ ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-
 ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=Wq4LFclPlLy47LyXvbaLeYiSMQABj7VYS3J87xyea_E,4159
 ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py,sha256=sOlxeIYIP8tycaTtZC9xkZosi6EDJUxvDw0_rc_NLI8,6823
-ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=NuetePZOdDmwRgN3aAsQrDIytXFicNylaAqORdz8C2o,25051
+ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=3VZjuW3OnElYE-VjhhT9ILvFSG8Rh0sH9n2M_Wds_TU,25146
 ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=yLzYiyNk_XtwwlUFbq5uaRahDKKF-eFBoI6xqHUNedw,14526
 ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=IAhNFS-nbV_ImNz8Xp98qU4r-2naj49qg9q08x53TFE,12521
 ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=1c6HA_Alm5yofF-eifqwdwHboxr25mvW3xxVYBktfjA,3667
+ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=tojXeuTigDoLCdPvBKRBWtTHwjtm2tFjTizM7NknSo4,3887
 ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py,sha256=YG601l1a29ov9VPu9Vl4RXxgL7l16k54_WWnoTNoG58,2064
 ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=PiyXvEj_qu0EnJFjk4GfGyWFZbwlvQQh0hdQ_lm0p8E,22976
 ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=2tzGw_cKCTPcfjEm7iGvFE6pTw7gMTPzeBov_MTaXNY,321336
 ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py,sha256=H8WNsk4v_5T6OUw4TFOzlDLjQhJwjh1nAMyMAoqMEi4,6946
 ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=W7Nw-IpugejIoHbCtQiN4Sn4ughLocQ9AUCjuAtOhOo,17258
 ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=G027toV-Km20JjKrc-Y_PilQ8ABEKrBvvzgLTnqVg7I,5819
-ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=4TeCGsFF26249fV6dJHLTZDrRa93SG9oer4rudoF8Y4,19443
+ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=Q3KXaqpvCPl7CL4bzwKvE6xhY5JNvfxar6QUXXXz3w0,19688
 ragaai_catalyst/tracers/exporters/__init__.py,sha256=qA3vx7z9CQ5kTGCn9LIDtIFvW9fJHQLkvF9-xBQUm94,237
-ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=Rm-QaLv1qMAKpHKcFOcK_HWaKHwFBoUH45_4QYipE-g,6843
+ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=cQvBGFOgSNLoyPIvkTrHottlpmg1vcgFgej-J2PwGtY,7149
 ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=_icciSCktK6c86KB2HV3GZMFHvUitgKJ8x_IdPmgi1M,6363
-ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=M3Bj311o_MpjY1gO9SGpfIIT2O0yz76uCgw6mKUwHdo,10032
+ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=aUooYU7SlT29pYK_p9WO5udlFO-yOEZNeR3RSB-DOBo,10549
 ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
 ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256=WlZCZeOQ54aMVjYS8BAeka2uaFC3ftBTMZ8zzzA8TAI,495947
 ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py,sha256=3ygkRT__lLDRflRttjzPu28tIA8cTCiGQVMQjqMItqQ,11309
-ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=-HZVmijeUFLO7e9OAvi1RJdWVTxPRUHPd1MkKQlCD54,11785
+ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=NPsxU04u6MCOMqisrgiAIv1bXFjWNwlrUn-LScC8f-s,12109
 ragaai_catalyst/tracers/utils/utils.py,sha256=o-p9n2ZuophdrV0wrixu-BqRHCkovup_klc3mS8mU8g,2374
-ragaai_catalyst-2.2.5b5.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-ragaai_catalyst-2.2.5b5.dist-info/METADATA,sha256=Oho91NQ2Otn-agTiKESdvBL1iZwLyQLeBtWTgu5nfc8,17735
-ragaai_catalyst-2.2.5b5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-ragaai_catalyst-2.2.5b5.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
-ragaai_catalyst-2.2.5b5.dist-info/RECORD,,
+ragaai_catalyst-2.2.5b6.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.2.5b6.dist-info/METADATA,sha256=kHQKCIyv8lTxRSQ7OCFbG_5WSUF83HcVyOmDVcVMCyg,17735
+ragaai_catalyst-2.2.5b6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ragaai_catalyst-2.2.5b6.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.2.5b6.dist-info/RECORD,,