ragaai-catalyst 2.1.5b1__py3-none-any.whl → 2.1.5b3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- a/ragaai_catalyst/dataset.py
+++ b/ragaai_catalyst/dataset.py
@@ -434,17 +434,16 @@ class Dataset:
 
             # Check response
             response_data = response.json()
-            if not response_data.get('success', False):
-                raise ValueError(response_data.get('message', 'Unknown error occurred'))
-
-            print(f"Successfully added rows to dataset {dataset_name}")
-            return response_data
+            if response_data.get('success', False):
+                print(f"{response_data['message']}")
+            else:
+                raise ValueError(response_data.get('message', 'Failed to add rows'))
 
         except Exception as e:
             logger.error(f"Error in add_rows_to_dataset: {e}")
             raise
 
-    def add_columns(self,text_fields,dataset_name, column_name, provider, model,variables={}):
+    def add_columns(self, text_fields, dataset_name, column_name, provider, model, variables={}):
        """
        Add a column to a dataset with dynamically fetched model parameters
 
@@ -571,7 +570,7 @@ class Dataset:
             variable_specs.append({
                 "name": key,
                 "type": "string",
-                "schema": values
+                "schema": "query"
             })
         add_column_payload["promptTemplate"]["variableSpecs"] = variable_specs
 
@@ -593,9 +592,10 @@ class Dataset:
             response.raise_for_status()
             response_data = response.json()
 
-            print("Column added successfully:")
-            print(json.dumps(response_data, indent=2))
-            return response_data
+            if response_data.get('success', False):
+                print(f"Column '{column_name}' added successfully to dataset '{dataset_name}'")
+            else:
+                raise ValueError(response_data.get('message', 'Failed to add column'))
 
         except requests.exceptions.RequestException as e:
             print(f"Error adding column: {e}")
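Both `add_rows_to_dataset` and `add_columns` use the same response-handling pattern: print the server-supplied message on success, otherwise raise `ValueError` with that message. A minimal sketch of the pattern in isolation (sample payloads are made up):

```python
def check_success(response_data: dict) -> None:
    # Mirrors the success check in the methods above; not library code.
    if response_data.get('success', False):
        print(response_data.get('message', 'OK'))
    else:
        raise ValueError(response_data.get('message', 'Unknown error'))

check_success({"success": True, "message": "2 rows added"})   # prints "2 rows added"
check_success({"success": False, "message": "Bad request"})   # raises ValueError("Bad request")
```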
--- a/ragaai_catalyst/ragaai_catalyst.py
+++ b/ragaai_catalyst/ragaai_catalyst.py
@@ -2,7 +2,7 @@ import os
 import logging
 import requests
 from typing import Dict, Optional, Union
-
+import re
 logger = logging.getLogger("RagaAICatalyst")
 
 
@@ -55,10 +55,11 @@ class RagaAICatalyst:
         self.api_keys = api_keys or {}
 
         if base_url:
-            RagaAICatalyst.BASE_URL = base_url
+            RagaAICatalyst.BASE_URL = self._normalize_base_url(base_url)
             try:
+                # set the os.environ["RAGAAI_CATALYST_BASE_URL"] before getting the token as it is used in the get_token method
+                os.environ["RAGAAI_CATALYST_BASE_URL"] = RagaAICatalyst.BASE_URL
                 self.get_token()
-                os.environ["RAGAAI_CATALYST_BASE_URL"] = base_url
             except requests.exceptions.RequestException:
                 raise ConnectionError(
                     "The provided base_url is not accessible. Please re-check the base_url."
@@ -71,6 +72,14 @@ class RagaAICatalyst:
         if self.api_keys:
             self._upload_keys()
 
+    @staticmethod
+    def _normalize_base_url(url):
+        url = re.sub(r'(?<!:)//+', '/', url)  # Ignore the `://` part of URLs and remove extra // if any
+        url = url.rstrip("/")  # To remove trailing slashes
+        if not url.endswith("/api"):  # To ensure it ends with /api
+            url = f"{url}/api"
+        return url
+
     def _set_access_key_secret_key(self, access_key, secret_key):
         os.environ["RAGAAI_CATALYST_ACCESS_KEY"] = access_key
         os.environ["RAGAAI_CATALYST_SECRET_KEY"] = secret_key
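`_normalize_base_url` collapses duplicate slashes (leaving the `://` scheme separator intact, thanks to the `(?<!:)` lookbehind), strips trailing slashes, and appends `/api` when missing. A quick demonstration on made-up URLs:

```python
import re

def _normalize_base_url(url):
    # Reproduction of the helper above for a standalone demo.
    url = re.sub(r'(?<!:)//+', '/', url)
    url = url.rstrip("/")
    if not url.endswith("/api"):
        url = f"{url}/api"
    return url

print(_normalize_base_url("https://catalyst.example.com//"))     # https://catalyst.example.com/api
print(_normalize_base_url("https://catalyst.example.com/api/"))  # https://catalyst.example.com/api
```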
--- a/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py
+++ b/ragaai_catalyst/tracers/agentic_tracing/tracers/base.py
@@ -1,8 +1,5 @@
 import json
 import os
-import platform
-import psutil
-import pkg_resources
 from datetime import datetime
 from pathlib import Path
 from typing import List, Any, Dict
@@ -16,20 +13,9 @@ from ..data.data_structure import (
     Trace,
     Metadata,
     SystemInfo,
-    OSInfo,
-    EnvironmentInfo,
     Resources,
-    CPUResource,
-    MemoryResource,
-    DiskResource,
-    NetworkResource,
-    ResourceInfo,
-    MemoryInfo,
-    DiskInfo,
-    NetworkInfo,
     Component,
 )
-
 from ..upload.upload_agentic_traces import UploadAgenticTraces
 from ..upload.upload_code import upload_code
 from ..upload.upload_trace_metric import upload_trace_metric
@@ -37,9 +23,8 @@ from ..utils.file_name_tracker import TrackName
 from ..utils.zip_list_of_unique_files import zip_list_of_unique_files
 from ..utils.span_attributes import SpanAttributes
 from ..utils.create_dataset_schema import create_dataset_schema_with_trace
+from ..utils.system_monitor import SystemMonitor
 
-
-# Configure logging to show debug messages (which includes info messages as well)
 import logging
 
 logger = logging.getLogger(__name__)
@@ -76,12 +61,12 @@ class TracerJSONEncoder(json.JSONEncoder):
 class BaseTracer:
     def __init__(self, user_details):
         self.user_details = user_details
-        self.project_name = self.user_details["project_name"]  # Access the project_name
-        self.dataset_name = self.user_details["dataset_name"]  # Access the dataset_name
-        self.project_id = self.user_details["project_id"]  # Access the project_id
-        self.trace_name = self.user_details["trace_name"]  # Access the trace_name
+        self.project_name = self.user_details["project_name"]
+        self.dataset_name = self.user_details["dataset_name"]
+        self.project_id = self.user_details["project_id"]
+        self.trace_name = self.user_details["trace_name"]
         self.visited_metrics = []
-        self.trace_metrics = []  # Store metrics here
+        self.trace_metrics = []
 
         # Initialize trace data
         self.trace_id = None
@@ -97,117 +82,60 @@ class BaseTracer:
         self.network_usage_list = []
         self.tracking_thread = None
         self.tracking = False
+        self.system_monitor = None
 
     def _get_system_info(self) -> SystemInfo:
-        # Get OS info
-        os_info = OSInfo(
-            name=platform.system(),
-            version=platform.version(),
-            platform=platform.machine(),
-            kernel_version=platform.release(),
-        )
-
-        # Get Python environment info
-        installed_packages = [
-            f"{pkg.key}=={pkg.version}" for pkg in pkg_resources.working_set
-        ]
-        env_info = EnvironmentInfo(
-            name="Python",
-            version=platform.python_version(),
-            packages=installed_packages,
-            env_path=sys.prefix,
-            command_to_run=f"python {sys.argv[0]}",
-        )
-
-        return SystemInfo(
-            id=f"sys_{self.trace_id}",
-            os=os_info,
-            environment=env_info,
-            source_code="Path to the source code .zip file in format hashid.zip",  # TODO: Implement source code archiving
-        )
+        return self.system_monitor.get_system_info()
 
     def _get_resources(self) -> Resources:
-        # CPU info
-        cpu_info = ResourceInfo(
-            name=platform.processor(),
-            cores=psutil.cpu_count(logical=False),
-            threads=psutil.cpu_count(logical=True),
-        )
-        cpu = CPUResource(info=cpu_info, interval="5s", values=[psutil.cpu_percent()])
-
-        # Memory info
-        memory = psutil.virtual_memory()
-        mem_info = MemoryInfo(
-            total=memory.total / (1024**3),  # Convert to GB
-            free=memory.available / (1024**3),
-        )
-        mem = MemoryResource(info=mem_info, interval="5s", values=[memory.percent])
-
-        # Disk info
-        disk = psutil.disk_usage("/")
-        disk_info = DiskInfo(total=disk.total / (1024**3), free=disk.free / (1024**3))
-        disk_io = psutil.disk_io_counters()
-        disk_resource = DiskResource(
-            info=disk_info,
-            interval="5s",
-            read=[disk_io.read_bytes / (1024**2)],  # MB
-            write=[disk_io.write_bytes / (1024**2)],
-        )
-
-        # Network info
-        net_io = psutil.net_io_counters()
-        net_info = NetworkInfo(
-            upload_speed=net_io.bytes_sent / (1024**2),  # MB
-            download_speed=net_io.bytes_recv / (1024**2),
-        )
-        net = NetworkResource(
-            info=net_info,
-            interval="5s",
-            uploads=[net_io.bytes_sent / (1024**2)],
-            downloads=[net_io.bytes_recv / (1024**2)],
-        )
-
-        return Resources(cpu=cpu, memory=mem, disk=disk_resource, network=net)
+        return self.system_monitor.get_resources()
 
     def _track_memory_usage(self):
         self.memory_usage_list = []
         while self.tracking:
-            memory_usage = psutil.Process().memory_info().rss
-            self.memory_usage_list.append(memory_usage / (1024 * 1024))  # Convert to MB and append to the list
-            time.sleep(self.interval_time)
+            usage = self.system_monitor.track_memory_usage()
+            self.memory_usage_list.append(usage)
+            try:
+                time.sleep(self.interval_time)
+            except Exception as e:
+                logger.warning(f"Sleep interrupted in memory tracking: {str(e)}")
 
     def _track_cpu_usage(self):
         self.cpu_usage_list = []
         while self.tracking:
-            cpu_usage = psutil.cpu_percent(interval=self.interval_time)
-            self.cpu_usage_list.append(cpu_usage)
-            time.sleep(self.interval_time)
+            usage = self.system_monitor.track_cpu_usage(self.interval_time)
+            self.cpu_usage_list.append(usage)
+            try:
+                time.sleep(self.interval_time)
+            except Exception as e:
+                logger.warning(f"Sleep interrupted in CPU tracking: {str(e)}")
 
     def _track_disk_usage(self):
         self.disk_usage_list = []
         while self.tracking:
-            disk_io = psutil.disk_io_counters()
-            self.disk_usage_list.append({
-                'disk_read': disk_io.read_bytes / (1024 * 1024),  # Convert to MB
-                'disk_write': disk_io.write_bytes / (1024 * 1024)  # Convert to MB
-            })
-            time.sleep(self.interval_time)
+            usage = self.system_monitor.track_disk_usage()
+            self.disk_usage_list.append(usage)
+            try:
+                time.sleep(self.interval_time)
+            except Exception as e:
+                logger.warning(f"Sleep interrupted in disk tracking: {str(e)}")
 
     def _track_network_usage(self):
        self.network_usage_list = []
        while self.tracking:
-            net_io = psutil.net_io_counters()
-            self.network_usage_list.append({
-                'uploads': net_io.bytes_sent / (1024 * 1024),  # Convert to MB
-                'downloads': net_io.bytes_recv / (1024 * 1024)  # Convert to MB
-            })
-            time.sleep(self.interval_time)
+            usage = self.system_monitor.track_network_usage()
+            self.network_usage_list.append(usage)
+            try:
+                time.sleep(self.interval_time)
+            except Exception as e:
+                logger.warning(f"Sleep interrupted in network tracking: {str(e)}")
 
     def start(self):
         """Initialize a new trace"""
         self.tracking = True
-        self.tracking_thread = threading.Thread(target=self._track_memory_usage)
-        self.tracking_thread.start()
+        self.trace_id = str(uuid.uuid4())
+        self.system_monitor = SystemMonitor(self.trace_id)
+        threading.Thread(target=self._track_memory_usage).start()
         threading.Thread(target=self._track_cpu_usage).start()
         threading.Thread(target=self._track_disk_usage).start()
         threading.Thread(target=self._track_network_usage).start()
@@ -223,9 +151,6 @@ class BaseTracer:
             resources=self._get_resources(),
         )
 
-        # Generate a unique trace ID, when trace starts
-        self.trace_id = str(uuid.uuid4())
-
         # Get the start time
         self.start_time = datetime.now().astimezone().isoformat()
 
@@ -257,8 +182,6 @@ class BaseTracer:
 
         #track memory usage
         self.tracking = False
-        if self.tracking_thread is not None:
-            self.tracking_thread.join()
         self.trace.metadata.resources.memory.values = self.memory_usage_list
 
         #track cpu usage
@@ -695,12 +618,11 @@ class BaseTracer:
             # Process additional interactions and network calls
             if "interactions" in child:
                 for interaction in child["interactions"]:
-                    if interaction!=[]:
-                        interaction["id"] = str(interaction_id)
-                        interaction["span_id"] = child.get("id")
-                        interaction["error"] = None
-                        interactions.append(interaction)
-                        interaction_id += 1
+                    interaction["id"] = str(interaction_id)
+                    interaction["span_id"] = child.get("id")
+                    interaction["error"] = None
+                    interactions.append(interaction)
+                    interaction_id += 1
 
             if "network_calls" in child:
                 for child_network_call in child["network_calls"]:
@@ -877,16 +799,15 @@ class BaseTracer:
             # Process interactions from span.data if they exist
             if span.interactions:
                 for span_interaction in span.interactions:
-                    if span_interaction != []:
-                        interaction = {}
-                        interaction["id"] = str(interaction_id)
-                        interaction["span_id"] = span.id
-                        interaction["interaction_type"] = span_interaction.type
-                        interaction["content"] = span_interaction.content
-                        interaction["timestamp"] = span_interaction.timestamp
-                        interaction["error"] = span.error
-                        interactions.append(interaction)
-                        interaction_id += 1
+                    interaction = {}
+                    interaction["id"] = str(interaction_id)
+                    interaction["span_id"] = span.id
+                    interaction["interaction_type"] = span_interaction.type
+                    interaction["content"] = span_interaction.content
+                    interaction["timestamp"] = span_interaction.timestamp
+                    interaction["error"] = span.error
+                    interactions.append(interaction)
+                    interaction_id += 1
 
             if span.network_calls:
                 for span_network_call in span.network_calls:
--- a/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py
+++ b/ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py
@@ -78,6 +78,7 @@ class LLMTracerMixin:
 
         if "openai" in sys.modules:
             self.patch_openai_methods(sys.modules["openai"])
+            self.patch_openai_beta_methods(sys.modules["openai"])
         if "litellm" in sys.modules:
             self.patch_litellm_methods(sys.modules["litellm"])
         if "anthropic" in sys.modules:
@@ -97,6 +98,7 @@ class LLMTracerMixin:
             self.patch_vertex_ai_methods, "vertexai.generative_models"
         )
         wrapt.register_post_import_hook(self.patch_openai_methods, "openai")
+        wrapt.register_post_import_hook(self.patch_openai_beta_methods, "openai")
         wrapt.register_post_import_hook(self.patch_litellm_methods, "litellm")
         wrapt.register_post_import_hook(self.patch_anthropic_methods, "anthropic")
         wrapt.register_post_import_hook(
@@ -135,6 +137,42 @@ class LLMTracerMixin:
             # Log the error but continue execution
             print(f"Warning: Failed to patch OpenAI methods: {str(e)}")
 
+    def patch_openai_beta_methods(self, openai_module):
+        """
+        Patch the new openai.beta endpoints (threads, runs, messages, etc.)
+        so that calls like openai.beta.threads.create(...) or
+        openai.beta.threads.runs.create(...) are automatically traced.
+        """
+        # Make sure openai_module has a 'beta' attribute
+        if not hasattr(openai_module, "beta"):
+            return
+
+        beta_module = openai_module.beta
+
+        # Patch openai.beta.threads
+        import openai
+        openai.api_type = "openai"
+        if hasattr(beta_module, "threads"):
+            threads_obj = beta_module.threads
+            # Patch top-level methods on openai.beta.threads
+            for method_name in ["create", "list"]:
+                if hasattr(threads_obj, method_name):
+                    self.wrap_method(threads_obj, method_name)
+
+            # Patch the nested objects: messages, runs
+            if hasattr(threads_obj, "messages"):
+                messages_obj = threads_obj.messages
+                for method_name in ["create", "list"]:
+                    if hasattr(messages_obj, method_name):
+                        self.wrap_method(messages_obj, method_name)
+
+            if hasattr(threads_obj, "runs"):
+                runs_obj = threads_obj.runs
+                for method_name in ["create", "retrieve", "list"]:
+                    if hasattr(runs_obj, method_name):
+                        self.wrap_method(runs_obj, method_name)
+
+
     def patch_anthropic_methods(self, module):
         if hasattr(module, "Anthropic"):
             client_class = getattr(module, "Anthropic")
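`patch_openai_beta_methods` is wired up twice: it is called directly when `openai` is already in `sys.modules`, and via `wrapt.register_post_import_hook` so it also runs if `openai` is imported later. A minimal sketch of the post-import-hook mechanism (the hook body is illustrative only):

```python
import wrapt

def on_openai_import(module):
    # Runs once when `openai` is first imported; a tracer would patch
    # module attributes here.
    print(f"patching {module.__name__}.beta endpoints")

# Register before the application imports openai. If openai is already
# imported, wrapt invokes the hook immediately.
wrapt.register_post_import_hook(on_openai_import, "openai")
```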
--- /dev/null
+++ b/ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py
@@ -0,0 +1,215 @@
+import platform
+import psutil
+import sys
+import pkg_resources
+import logging
+from typing import Dict, List, Optional
+from ..data.data_structure import (
+    SystemInfo,
+    OSInfo,
+    EnvironmentInfo,
+    Resources,
+    CPUResource,
+    MemoryResource,
+    DiskResource,
+    NetworkResource,
+    ResourceInfo,
+    MemoryInfo,
+    DiskInfo,
+    NetworkInfo,
+)
+
+logger = logging.getLogger(__name__)
+
+class SystemMonitor:
+    def __init__(self, trace_id: str):
+        self.trace_id = trace_id
+
+    def get_system_info(self) -> SystemInfo:
+        # Initialize with None values
+        os_info = OSInfo(
+            name=None,
+            version=None,
+            platform=None,
+            kernel_version=None,
+        )
+        env_info = EnvironmentInfo(
+            name=None,
+            version=None,
+            packages=[],
+            env_path=None,
+            command_to_run=None,
+        )
+
+        try:
+            # Get OS info
+            os_info = OSInfo(
+                name=platform.system(),
+                version=platform.version(),
+                platform=platform.machine(),
+                kernel_version=platform.release(),
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get OS info: {str(e)}")
+
+        try:
+            # Get Python environment info
+            installed_packages = [
+                f"{pkg.key}=={pkg.version}" for pkg in pkg_resources.working_set
+            ]
+            env_info = EnvironmentInfo(
+                name="Python",
+                version=platform.python_version(),
+                packages=installed_packages,
+                env_path=sys.prefix,
+                command_to_run=f"python {sys.argv[0]}",
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get environment info: {str(e)}")
+
+
+        # Always return a valid SystemInfo object
+        return SystemInfo(
+            id=f"sys_{self.trace_id}",
+            os=os_info,
+            environment=env_info,
+            source_code="",
+        )
+
+    def get_resources(self) -> Resources:
+        # Initialize with None values
+        cpu_info = ResourceInfo(
+            name=None,
+            cores=None,
+            threads=None,
+        )
+        cpu = CPUResource(info=cpu_info, interval="5s", values=[])
+
+        mem_info = MemoryInfo(
+            total=None,
+            free=None,
+        )
+        mem = MemoryResource(info=mem_info, interval="5s", values=[])
+
+        disk_info = DiskInfo(
+            total=None,
+            free=None,
+        )
+        disk_resource = DiskResource(
+            info=disk_info,
+            interval="5s",
+            read=[],
+            write=[],
+        )
+
+        net_info = NetworkInfo(
+            upload_speed=None,
+            download_speed=None,
+        )
+        net = NetworkResource(
+            info=net_info,
+            interval="5s",
+            uploads=[],
+            downloads=[],
+        )
+
+        try:
+            # CPU info
+            cpu_info = ResourceInfo(
+                name=platform.processor(),
+                cores=psutil.cpu_count(logical=False),
+                threads=psutil.cpu_count(logical=True),
+            )
+            cpu = CPUResource(info=cpu_info, interval="5s", values=[psutil.cpu_percent()])
+        except Exception as e:
+            logger.warning(f"Failed to get CPU info: {str(e)}")
+
+
+        try:
+            # Memory info
+            memory = psutil.virtual_memory()
+            mem_info = MemoryInfo(
+                total=memory.total / (1024**3),  # Convert to GB
+                free=memory.available / (1024**3),
+            )
+            mem = MemoryResource(info=mem_info, interval="5s", values=[memory.percent])
+        except Exception as e:
+            logger.warning(f"Failed to get memory info: {str(e)}")
+
+
+        try:
+            # Disk info
+            disk = psutil.disk_usage("/")
+            disk_info = DiskInfo(total=disk.total / (1024**3), free=disk.free / (1024**3))
+            disk_io = psutil.disk_io_counters()
+            disk_resource = DiskResource(
+                info=disk_info,
+                interval="5s",
+                read=[disk_io.read_bytes / (1024**2)],  # MB
+                write=[disk_io.write_bytes / (1024**2)],
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get disk info: {str(e)}")
+
+        try:
+            # Network info
+            net_io = psutil.net_io_counters()
+            net_info = NetworkInfo(
+                upload_speed=net_io.bytes_sent / (1024**2),  # MB
+                download_speed=net_io.bytes_recv / (1024**2),
+            )
+            net = NetworkResource(
+                info=net_info,
+                interval="5s",
+                uploads=[net_io.bytes_sent / (1024**2)],
+                downloads=[net_io.bytes_recv / (1024**2)],
+            )
+        except Exception as e:
+            logger.warning(f"Failed to get network info: {str(e)}")
+
+
+        # Always return a valid Resources object
+        return Resources(cpu=cpu, memory=mem, disk=disk_resource, network=net)
+
+    def track_memory_usage(self) -> Optional[float]:
+        """Track memory usage in MB"""
+        try:
+            memory_usage = psutil.Process().memory_info().rss
+            return memory_usage / (1024 * 1024)  # Convert to MB
+        except Exception as e:
+            logger.warning(f"Failed to track memory usage: {str(e)}")
+            return None
+
+    def track_cpu_usage(self, interval: float) -> Optional[float]:
+        """Track CPU usage percentage"""
+        try:
+            return psutil.cpu_percent(interval=interval)
+        except Exception as e:
+            logger.warning(f"Failed to track CPU usage: {str(e)}")
+            return None
+
+    def track_disk_usage(self) -> Dict[str, Optional[float]]:
+        """Track disk I/O in MB"""
+        default_response = {'disk_read': None, 'disk_write': None}
+        try:
+            disk_io = psutil.disk_io_counters()
+            return {
+                'disk_read': disk_io.read_bytes / (1024 * 1024),  # Convert to MB
+                'disk_write': disk_io.write_bytes / (1024 * 1024)  # Convert to MB
+            }
+        except Exception as e:
+            logger.warning(f"Failed to track disk usage: {str(e)}")
+            return default_response
+
+    def track_network_usage(self) -> Dict[str, Optional[float]]:
+        """Track network I/O in MB"""
+        default_response = {'uploads': None, 'downloads': None}
+        try:
+            net_io = psutil.net_io_counters()
+            return {
+                'uploads': net_io.bytes_sent / (1024 * 1024),  # Convert to MB
+                'downloads': net_io.bytes_recv / (1024 * 1024)  # Convert to MB
+            }
+        except Exception as e:
+            logger.warning(f"Failed to track network usage: {str(e)}")
+            return default_response
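`SystemMonitor` can be exercised standalone; every method catches its own exceptions and falls back to `None`-valued defaults, so the tracer's polling loops never crash on a failed probe. A usage sketch (import path taken from the RECORD entry below; output values will vary):

```python
from ragaai_catalyst.tracers.agentic_tracing.utils.system_monitor import SystemMonitor

monitor = SystemMonitor(trace_id="demo-trace")
print(monitor.track_memory_usage())   # e.g. 84.3 (MB), or None on failure
print(monitor.track_cpu_usage(1.0))   # e.g. 7.5 (percent), or None
print(monitor.track_disk_usage())     # {'disk_read': ..., 'disk_write': ...} in MB
print(monitor.track_network_usage())  # {'uploads': ..., 'downloads': ...} in MB
```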
--- a/ragaai_catalyst/tracers/tracer.py
+++ b/ragaai_catalyst/tracers/tracer.py
@@ -1,4 +1,6 @@
+from audioop import add
 import os
+import uuid
 import datetime
 import logging
 import asyncio
@@ -6,6 +8,13 @@ import aiohttp
 import requests
 from contextlib import contextmanager
 from concurrent.futures import ThreadPoolExecutor
+from ragaai_catalyst.tracers.langchain_callback import LangchainTracer
+from ragaai_catalyst.tracers.utils.convert_langchain_callbacks_output import convert_langchain_callbacks_output
+
+from ragaai_catalyst.tracers.utils.langchain_tracer_extraction_logic import langchain_tracer_extraction
+from ragaai_catalyst.tracers.upload_traces import UploadTraces
+import tempfile
+import json
 
 from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
@@ -118,6 +127,7 @@ class Tracer(AgenticTracing):
         self.timeout = 30
         self.num_projects = 100
         self.start_time = datetime.datetime.now().astimezone().isoformat()
+        self.model_cost_dict = load_model_costs()
 
         if update_llm_cost:
             # First update the model costs file from GitHub
@@ -152,11 +162,12 @@ class Tracer(AgenticTracing):
             raise
 
         if tracer_type == "langchain":
-            self.raga_client = RagaExporter(project_name=self.project_name, dataset_name=self.dataset_name)
+            # self.raga_client = RagaExporter(project_name=self.project_name, dataset_name=self.dataset_name)
 
-            self._tracer_provider = self._setup_provider()
-            self._instrumentor = self._setup_instrumentor(tracer_type)
-            self.is_instrumented = False
+            # self._tracer_provider = self._setup_provider()
+            # self._instrumentor = self._setup_instrumentor(tracer_type)
+            # self.is_instrumented = False
+            # self._upload_task = None
             self._upload_task = None
         elif tracer_type == "llamaindex":
             self._upload_task = None
@@ -239,11 +250,12 @@ class Tracer(AgenticTracing):
     def start(self):
         """Start the tracer."""
         if self.tracer_type == "langchain":
-            if not self.is_instrumented:
-                self._instrumentor().instrument(tracer_provider=self._tracer_provider)
-                self.is_instrumented = True
-            print(f"Tracer started for project: {self.project_name}")
-            return self
+            # if not self.is_instrumented:
+            #     self._instrumentor().instrument(tracer_provider=self._tracer_provider)
+            #     self.is_instrumented = True
+            # print(f"Tracer started for project: {self.project_name}")
+            self.langchain_tracer = LangchainTracer()
+            return self.langchain_tracer.start()
         elif self.tracer_type == "llamaindex":
             from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
             return LlamaIndexTracer(self._pass_user_data()).start()
@@ -254,17 +266,74 @@ class Tracer(AgenticTracing):
     def stop(self):
         """Stop the tracer and initiate trace upload."""
         if self.tracer_type == "langchain":
-            if not self.is_instrumented:
-                logger.warning("Tracer was not started. No traces to upload.")
-                return "No traces to upload"
-
-            print("Stopping tracer and initiating trace upload...")
-            self._cleanup()
-            self._upload_task = self._run_async(self._upload_traces())
-            self.is_active = False
-            self.dataset_name = None
+            # if not self.is_instrumented:
+            #     logger.warning("Tracer was not started. No traces to upload.")
+            #     return "No traces to upload"
+
+            # print("Stopping tracer and initiating trace upload...")
+            # self._cleanup()
+            # self._upload_task = self._run_async(self._upload_traces())
+            # self.is_active = False
+            # self.dataset_name = None
+
+            # filename = f"langchain_callback_traces.json"
+            # filepath = os.path.join(tempfile.gettempdir(), filename)
+
+            user_detail = self._pass_user_data()
+            data, additional_metadata = self.langchain_tracer.stop()
+
+            # Add cost if possible
+            # import pdb; pdb.set_trace()
+            if additional_metadata['model_name']:
+                try:
+                    model_cost_data = self.model_cost_dict[additional_metadata['model_name']]
+                    prompt_cost = additional_metadata["tokens"]["prompt"]*model_cost_data["input_cost_per_token"]
+                    completion_cost = additional_metadata["tokens"]["completion"]*model_cost_data["output_cost_per_token"]
+                    # additional_metadata.setdefault('cost', {})["prompt_cost"] = prompt_cost
+                    # additional_metadata.setdefault('cost', {})["completion_cost"] = completion_cost
+                    additional_metadata.setdefault('cost', {})["total_cost"] = prompt_cost + completion_cost
+                except Exception as e:
+                    logger.warning(f"Error adding cost: {e}")
+
+            # with open(filepath, 'r') as f:
+            #     data = json.load(f)
+            additional_metadata["total_tokens"] = additional_metadata["tokens"]["total"]
+            additional_metadata["total_cost"] = additional_metadata["cost"]["total_cost"]
+
+            del additional_metadata["tokens"]
+            del additional_metadata["cost"]
+
+            combined_metadata = user_detail['trace_user_detail']['metadata'].copy()
+            combined_metadata.update(additional_metadata)
+            combined_metadata
+
+            langchain_traces = langchain_tracer_extraction(data)
+            final_result = convert_langchain_callbacks_output(langchain_traces)
+            final_result[0]['project_name'] = user_detail['project_name']
+            final_result[0]['trace_id'] = str(uuid.uuid4())
+            final_result[0]['session_id'] = None
+            final_result[0]['metadata'] = combined_metadata
+            final_result[0]['pipeline'] = user_detail['trace_user_detail']['pipeline']
+
+            filepath_3 = os.path.join(os.getcwd(), "final_result.json")
+            with open(filepath_3, 'w') as f:
+                json.dump(final_result, f, indent=2)
 
-            return "Trace upload initiated. Use get_upload_status() to check the status."
+
+            print(filepath_3)
+
+            additional_metadata_keys = additional_metadata.keys() if additional_metadata else None
+
+            UploadTraces(json_file_path=filepath_3,
+                         project_name=self.project_name,
+                         project_id=self.project_id,
+                         dataset_name=self.dataset_name,
+                         user_detail=user_detail,
+                         base_url=self.base_url
+                         ).upload_traces(additional_metadata_keys=additional_metadata_keys)
+
+            return
+
         elif self.tracer_type == "llamaindex":
             from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
             return LlamaIndexTracer(self._pass_user_data()).stop()
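The cost block in `stop()` multiplies the tracer's token counts by per-token rates looked up in `model_cost_dict`. A worked example with made-up numbers:

```python
# Hypothetical rates and token counts, for illustration only.
model_cost_data = {"input_cost_per_token": 0.5e-6, "output_cost_per_token": 1.5e-6}
tokens = {"prompt": 1000, "completion": 200, "total": 1200}

prompt_cost = tokens["prompt"] * model_cost_data["input_cost_per_token"]           # 0.0005
completion_cost = tokens["completion"] * model_cost_data["output_cost_per_token"]  # 0.0003
total_cost = prompt_cost + completion_cost                                         # 0.0008
```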
--- a/ragaai_catalyst/tracers/upload_traces.py
+++ b/ragaai_catalyst/tracers/upload_traces.py
@@ -20,7 +20,7 @@ class UploadTraces:
         self.base_url = base_url
         self.timeout = 10
 
-    def _create_dataset_schema_with_trace(self):
+    def _create_dataset_schema_with_trace(self, additional_metadata_keys=None, additional_pipeline_keys=None):
         SCHEMA_MAPPING_NEW = {
             "trace_id": {"columnType": "traceId"},
             "trace_uri": {"columnType": "traceUri"},
@@ -34,6 +34,15 @@ class UploadTraces:
             "vector_store":{"columnType":"pipeline"},
             "feedback": {"columnType":"feedBack"}
         }
+
+        if additional_metadata_keys:
+            for key in additional_metadata_keys:
+                SCHEMA_MAPPING_NEW[key] = {"columnType": "metadata"}
+
+        if additional_pipeline_keys:
+            for key in additional_pipeline_keys:
+                SCHEMA_MAPPING_NEW[key] = {"columnType": "pipeline"}
+
         def make_request():
             headers = {
                 "Content-Type": "application/json",
@@ -119,9 +128,14 @@ class UploadTraces:
             data=payload,
             timeout=self.timeout)
 
-    def upload_traces(self):
-        self._create_dataset_schema_with_trace()
-        presignedUrl = self._get_presigned_url()
-        self._put_presigned_url(presignedUrl, self.json_file_path)
-        self._insert_traces(presignedUrl)
-        print("Traces uploaded")
+    def upload_traces(self, additional_metadata_keys=None, additional_pipeline_keys=None):
+        try:
+            self._create_dataset_schema_with_trace(additional_metadata_keys, additional_pipeline_keys)
+            presignedUrl = self._get_presigned_url()
+            if presignedUrl is None:
+                return
+            self._put_presigned_url(presignedUrl, self.json_file_path)
+            self._insert_traces(presignedUrl)
+            print("Traces uploaded")
+        except Exception as e:
+            print(f"Error while uploading agentic traces: {e}")
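With the new keyword arguments, `upload_traces` registers extra metadata and pipeline columns in the dataset schema before uploading. A usage sketch (the file path, project names, and pipeline keys are placeholders; `total_tokens`/`total_cost` match the keys produced in `Tracer.stop()` above):

```python
from ragaai_catalyst.tracers.upload_traces import UploadTraces

uploader = UploadTraces(
    json_file_path="final_result.json",  # placeholder path
    project_name="my_project",
    project_id="123",
    dataset_name="my_dataset",
    user_detail={},
    base_url="https://catalyst.example.com/api",
)
# Each key is added to SCHEMA_MAPPING_NEW with columnType "metadata" or "pipeline".
uploader.upload_traces(
    additional_metadata_keys=["total_tokens", "total_cost"],
    additional_pipeline_keys=["llm_model", "vector_store"],  # hypothetical keys
)
```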
--- /dev/null
+++ b/ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py
@@ -0,0 +1,61 @@
+import json
+
+def convert_langchain_callbacks_output(result, project_name="", metadata="", pipeline=""):
+    initial_struc = [{
+        "project_name": project_name,
+        "trace_id": "NA",
+        "session_id": "NA",
+        "metadata" : metadata,
+        "pipeline" : pipeline,
+        "traces" : []
+    }]
+    traces_data = []
+
+    prompt = result["data"]["prompt"]
+    response = result["data"]["response"]
+    context = result["data"]["context"]
+    final_prompt = ""
+
+    prompt_structured_data = {
+        "traceloop.entity.input": json.dumps({
+            "kwargs": {
+                "input": prompt,
+            }
+        })
+    }
+    prompt_data = {
+        "name": "retrieve_documents.langchain.workflow",
+        "attributes": prompt_structured_data,
+    }
+
+    traces_data.append(prompt_data)
+
+    context_structured_data = {
+        "traceloop.entity.input": json.dumps({
+            "kwargs": {
+                "context": context
+            }
+        }),
+        "traceloop.entity.output": json.dumps({
+            "kwargs": {
+                "text": prompt
+            }
+        })
+    }
+    context_data = {
+        "name": "PromptTemplate.langchain.task",
+        "attributes": context_structured_data,
+    }
+    traces_data.append(context_data)
+
+    response_structured_data = {"gen_ai.completion.0.content": response,
+                                "gen_ai.prompt.0.content": prompt}
+    response_data = {
+        "name": "ChatOpenAI.langchain.task",
+        "attributes" : response_structured_data
+    }
+    traces_data.append(response_data)
+
+    initial_struc[0]["traces"] = traces_data
+
+    return initial_struc
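`convert_langchain_callbacks_output` maps the extracted prompt/response/context triple onto three traceloop-style spans. A sketch of the expected input and resulting shape (values are made up):

```python
result = {
    "data": {
        "prompt": "What is RagaAI Catalyst?",
        "response": "An observability platform for LLM applications.",
        "context": "Excerpt from the Catalyst docs...",
    }
}
out = convert_langchain_callbacks_output(result, project_name="demo")
print(out[0]["project_name"])                 # demo
print([t["name"] for t in out[0]["traces"]])
# ['retrieve_documents.langchain.workflow', 'PromptTemplate.langchain.task', 'ChatOpenAI.langchain.task']
```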
--- /dev/null
+++ b/ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py
@@ -0,0 +1,81 @@
+import json
+import uuid
+
+def langchain_tracer_extraction(data):
+    trace_aggregate = {}
+    import uuid
+
+    def generate_trace_id():
+        """
+        Generate a random trace ID using UUID4.
+        Returns a string representation of the UUID with no hyphens.
+        """
+        return '0x'+str(uuid.uuid4()).replace('-', '')
+
+    trace_aggregate["tracer_type"] = "langchain"
+    trace_aggregate['trace_id'] = generate_trace_id()
+    trace_aggregate['session_id'] = None
+    trace_aggregate["pipeline"] = {
+        'llm_model': 'gpt-3.5-turbo',
+        'vector_store': 'faiss',
+        'embed_model': 'text-embedding-ada-002'
+    }
+    trace_aggregate["metadata"] = {
+        'key1': 'value1',
+        'key2': 'value2',
+        'log_source': 'langchain_tracer',
+        'recorded_on': '2024-06-14 08:57:27.324410'
+    }
+    trace_aggregate["prompt_length"] = 0
+    trace_aggregate["data"] = {}
+
+    def get_prompt(data):
+        # if "chain_starts" in data and data["chain_starts"] != []:
+        #     for item in data["chain_starts"]:
+
+        if "chat_model_calls" in data and data["chat_model_calls"] != []:
+            for item in data["chat_model_calls"]:
+                messages = item["messages"][0]
+                for message in messages:
+                    if message["type"]=="human":
+                        human_messages = message["content"].strip()
+                        return human_messages
+        if "llm_calls" in data and data["llm_calls"] != []:
+            if "llm_start" in data["llm_calls"][0]["event"]:
+                for item in data["llm_calls"]:
+                    prompt = item["prompts"]
+                return prompt[0].strip()
+
+    def get_response(data):
+        for item in data["llm_calls"]:
+            if item["event"] == "llm_end":
+                # import pdb; pdb.set_trace()
+                llm_end_responses = item["response"]["generations"][0]
+                for llm_end_response in llm_end_responses:
+                    response = llm_end_response["text"]
+                return response.strip()
+
+    def get_context(data):
+        if "retriever_actions" in data and data["retriever_actions"] != []:
+            for item in data["retriever_actions"]:
+                if item["event"] == "retriever_end":
+                    context = item["documents"][0]["page_content"].replace('\n', ' ')
+                    return context
+        if "chat_model_calls" in data and data["chat_model_calls"] != []:
+            for item in data["chat_model_calls"]:
+                messages = item["messages"][0]
+                for message in messages:
+                    if message["type"]=="system":
+                        content = message["content"].strip().replace('\n', ' ')
+                        return content
+
+
+    prompt = get_prompt(data)
+    response = get_response(data)
+    context = get_context(data)
+
+    trace_aggregate["data"]["prompt"]=prompt
+    trace_aggregate["data"]["response"]=response
+    trace_aggregate["data"]["context"]=context
+
+    return trace_aggregate
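`generate_trace_id` builds the trace ID by stripping the hyphens from a UUID4 and prefixing `0x`, giving `0x` plus 32 hex characters. For example (output differs per run):

```python
import uuid

def generate_trace_id():
    # Same scheme as above: '0x' + 32 hex characters.
    return '0x' + str(uuid.uuid4()).replace('-', '')

print(generate_trace_id())  # e.g. 0x3f2a9c0d5b6e4f718a9c0d5b6e4f7182
```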
--- a/ragaai_catalyst-2.1.5b1.dist-info/METADATA
+++ b/ragaai_catalyst-2.1.5b3.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: ragaai_catalyst
-Version: 2.1.5b1
+Version: 2.1.5b3
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>
 Requires-Python: <3.13,>=3.9
--- a/ragaai_catalyst-2.1.5b1.dist-info/RECORD
+++ b/ragaai_catalyst-2.1.5b3.dist-info/RECORD
@@ -1,6 +1,6 @@
 ragaai_catalyst/__init__.py,sha256=tvESV8UuVtth14E89wQxgf0WvQZSApWfDeLiIdmMhkE,791
 ragaai_catalyst/_version.py,sha256=JKt9KaVNOMVeGs8ojO6LvIZr7ZkMzNN-gCcvryy4x8E,460
-ragaai_catalyst/dataset.py,sha256=Mc0L28lO4f7zD8DraQMHWzuizJf8Q0Z-DxNx3pS4OFo,23409
+ragaai_catalyst/dataset.py,sha256=8EbZEpcV4Fb8oO0vhYIK_WIwGyYvJ4VzwGeIZDtu2E8,23475
 ragaai_catalyst/evaluation.py,sha256=34H2bYZNSrcu0jMQgDZw1OLVbQU80PaVLo2avju8POM,20311
 ragaai_catalyst/experiment.py,sha256=8yQo1phCHlpnJ-4CqCaIbLXg_1ZlAuLGI9kqGBl-OTE,18859
 ragaai_catalyst/guard_executor.py,sha256=llPbE3DyVtrybojXknzBZj8-dtUrGBQwi9-ZiPJxGRo,3762
@@ -8,14 +8,14 @@ ragaai_catalyst/guardrails_manager.py,sha256=DILMOAASK57FH9BLq_8yC1AQzRJ8McMFLwC
 ragaai_catalyst/internal_api_completion.py,sha256=DdICI5yfEudiOAIC8L4oxH0Qz7kX-BZCdo9IWsi2gNo,2965
 ragaai_catalyst/prompt_manager.py,sha256=W8ypramzOprrJ7-22d5vkBXIuIQ8v9XAzKDGxKsTK28,16550
 ragaai_catalyst/proxy_call.py,sha256=CHxldeceZUaLU-to_hs_Kf1z_b2vHMssLS_cOBedu78,5499
-ragaai_catalyst/ragaai_catalyst.py,sha256=FdqMzwuQLqS2-3JJDsTQ8uh2itllOxfPrRUjb8Kwmn0,17428
+ragaai_catalyst/ragaai_catalyst.py,sha256=5nVg3_-lcvhrXjNkPTeGhe3tdUjm_4ZIctOcqWXBkRA,17939
 ragaai_catalyst/synthetic_data_generation.py,sha256=uDV9tNwto2xSkWg5XHXUvjErW-4P34CTrxaJpRfezyA,19250
 ragaai_catalyst/utils.py,sha256=TlhEFwLyRU690HvANbyoRycR3nQ67lxVUQoUOfTPYQ0,3772
 ragaai_catalyst/tracers/__init__.py,sha256=LfgTes-nHpazssbGKnn8kyLZNr49kIPrlkrqqoTFTfc,301
 ragaai_catalyst/tracers/distributed.py,sha256=AIRvS5Ur4jbFDXsUkYuCTmtGoHHx3LOG4n5tWOh610U,10330
 ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
-ragaai_catalyst/tracers/tracer.py,sha256=S_ANRm5zSMvQiUyQTRwyUepFci_T3AN26wAOXoURfyc,15648
-ragaai_catalyst/tracers/upload_traces.py,sha256=mT5rverNUL5Rcal9VR5_c75wHBAUrm2pvYetTZqP3ok,4796
+ragaai_catalyst/tracers/tracer.py,sha256=k2HjH6ONaabbPvoX6xJRck-A2l-9GVW7Nueimuu-Ua8,19096
+ragaai_catalyst/tracers/upload_traces.py,sha256=2TWdRTN6FMaX-dqDv8BJWQS0xrCGYKkXEYOi2kK3Z3Y,5487
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -28,10 +28,10 @@ ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py,sha256=S4rCcKzU
 ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py,sha256=Xk1cLzs-2A3dgyBwRRnCWs7Eubki40FVonwd433hPN8,4805
 ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py,sha256=--wvhOJ-J2433WPatIS3wx6VFeCUIcgRT5_ZjGQDv2c,26364
-ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=RFHbmzFdkl1zV4ZnM6VOAnCeTMqSKmgdH8VuWsrBwf4,40120
+ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=88rX7OkOGEyVNECUrc4bYqODyulXve_-99d9ku5hBeQ,37373
 ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py,sha256=l3x3uFO5ov93I7UUrUX1M06WVGy2ug2jEZ1G7o315z4,13075
 ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=fuSVks5-OchQLXqN6T18CtDjE8ufrCcksckcC3WW-_k,30150
+ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=s6BRoBteCRF8XrXGnmZ98ZWPrSONC5RObPXNaq-im3w,31782
 ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=6hsg-Yw11v4qeELI1CWrdX8BXf-wJrTF5smBI5prgoo,15873
 ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py,sha256=m8CxYkl7iMiFya_lNwN1ykBc3Pmo-2pR_2HmpptwHWQ,10352
 ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=4rWL7fIJE5wN0nwh6fMWyh3OrrenZHJkNzyQXikyzQI,13771
@@ -49,6 +49,7 @@ ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py,sha256=v
 ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=wlXCuaRe81s-7FWdJ_MquXFGRZZfNrZxLIIxl-Ohbqk,15541
 ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=GXV1s349reRMpYF_EkK-b6peSb4SY-17WnlkvpuQ4sM,294430
 ragaai_catalyst/tracers/agentic_tracing/utils/span_attributes.py,sha256=MqeRNGxzeuh9qTK0NbYMftl9V9Z0V7gMgBoHkrXP56k,1592
+ragaai_catalyst/tracers/agentic_tracing/utils/system_monitor.py,sha256=H8WNsk4v_5T6OUw4TFOzlDLjQhJwjh1nAMyMAoqMEi4,6946
 ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=RciiDdo2riibEoM8X0FKHaXi78y3bWwNkV8U0leqigk,3508
 ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=DQHjcEuqEKsNSWaNs7SoOaq50yK4Jsl966S7mBnV-zA,5723
 ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=8SAN4pjA3XKBWlUZiDGXGh26ve1qRH_fv0VwewsfCFc,18818
@@ -60,9 +61,11 @@ ragaai_catalyst/tracers/instrumentators/langchain.py,sha256=yMN0qVF0pUVk6R5M1vJo
 ragaai_catalyst/tracers/instrumentators/llamaindex.py,sha256=SMrRlR4xM7k9HK43hakE8rkrWHxMlmtmWD-AX6TeByc,416
 ragaai_catalyst/tracers/instrumentators/openai.py,sha256=14R4KW9wQCR1xysLfsP_nxS7cqXrTPoD8En4MBAaZUU,379
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
+ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
+ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=cghjCuUe8w-2MZdh9xgtRGe3y219u26GGzpnuY4Wt6Q,3047
 ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
-ragaai_catalyst-2.1.5b1.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-ragaai_catalyst-2.1.5b1.dist-info/METADATA,sha256=raEM7cStptlPTu7ph3OyOU7UanQ3_7-T9dQbFXlFMbs,12764
-ragaai_catalyst-2.1.5b1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
-ragaai_catalyst-2.1.5b1.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
-ragaai_catalyst-2.1.5b1.dist-info/RECORD,,
+ragaai_catalyst-2.1.5b3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.1.5b3.dist-info/METADATA,sha256=i-IVw7tVuDCXGNCIBH8Lsovatn4x67VrhV-hf-HYWYQ,12764
+ragaai_catalyst-2.1.5b3.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ragaai_catalyst-2.1.5b3.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.1.5b3.dist-info/RECORD,,