ragaai-catalyst 2.1.6.2__py3-none-any.whl → 2.1.6.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -92,6 +92,7 @@ class BaseTracer:
92
92
  self._upload_tasks = []
93
93
  self._is_uploading = False
94
94
  self._upload_completed_callback = None
95
+ self.timeout = self.user_details.get("timeout", 120)
95
96
 
96
97
  ensure_uploader_running()
97
98
 
@@ -314,7 +315,8 @@ class BaseTracer:
314
315
  project_id=self.project_id,
315
316
  dataset_name=self.dataset_name,
316
317
  user_details=self.user_details,
317
- base_url=self.base_url
318
+ base_url=self.base_url,
319
+ timeout=self.timeout
318
320
  )
319
321
 
320
322
  # For backward compatibility
@@ -48,7 +48,7 @@ from ragaai_catalyst.tracers.upload_traces import UploadTraces
48
48
  class AgenticTracing(
49
49
  BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin, CustomTracerMixin
50
50
  ):
51
- def __init__(self, user_detail, auto_instrumentation=None):
51
+ def __init__(self, user_detail, auto_instrumentation=None, timeout=120):
52
52
  # Initialize all parent classes
53
53
  self.user_interaction_tracer = UserInteractionTracer()
54
54
  LLMTracerMixin.__init__(self)
@@ -60,7 +60,7 @@ class AgenticTracing(
60
60
  self.project_id = user_detail["project_id"]
61
61
  self.trace_user_detail = user_detail["trace_user_detail"]
62
62
  self.base_url = f"{RagaAICatalyst.BASE_URL}"
63
- self.timeout = 10
63
+ self.timeout = timeout
64
64
 
65
65
  # Add warning flag
66
66
  self._warning_shown = False
@@ -77,7 +77,7 @@ def get_executor():
77
77
 
78
78
  def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
79
79
  project_name: str, project_id: str, dataset_name: str,
80
- user_details: Dict[str, Any], base_url: str) -> Dict[str, Any]:
80
+ user_details: Dict[str, Any], base_url: str, timeout=120) -> Dict[str, Any]:
81
81
  """
82
82
  Process a single upload task
83
83
 
@@ -127,12 +127,13 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
127
127
  return result
128
128
 
129
129
  # Step 1: Create dataset schema
130
- logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url}")
130
+ logger.info(f"Creating dataset schema for {dataset_name} with base_url: {base_url} and timeout: {timeout}")
131
131
  try:
132
132
  response = create_dataset_schema_with_trace(
133
133
  dataset_name=dataset_name,
134
134
  project_name=project_name,
135
- base_url=base_url
135
+ base_url=base_url,
136
+ timeout=timeout
136
137
  )
137
138
  logger.info(f"Dataset schema created: {response}")
138
139
  except Exception as e:
@@ -141,13 +142,14 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
141
142
 
142
143
  # Step 2: Upload trace metrics
143
144
  if filepath and os.path.exists(filepath):
144
- logger.info(f"Uploading trace metrics for {filepath}")
145
+ logger.info(f"Uploading trace metrics for {filepath} with base_url: {base_url} and timeout: {timeout}")
145
146
  try:
146
147
  response = upload_trace_metric(
147
148
  json_file_path=filepath,
148
149
  dataset_name=dataset_name,
149
150
  project_name=project_name,
150
- base_url=base_url
151
+ base_url=base_url,
152
+ timeout=timeout
151
153
  )
152
154
  logger.info(f"Trace metrics uploaded: {response}")
153
155
  except Exception as e:
@@ -158,7 +160,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
158
160
 
159
161
  # Step 3: Upload agentic traces
160
162
  if filepath and os.path.exists(filepath):
161
- logger.info(f"Uploading agentic traces for {filepath}")
163
+ logger.info(f"Uploading agentic traces for {filepath} with base_url: {base_url} and timeout: {timeout}")
162
164
  try:
163
165
  upload_traces = UploadAgenticTraces(
164
166
  json_file_path=filepath,
@@ -167,6 +169,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
167
169
  dataset_name=dataset_name,
168
170
  user_detail=user_details,
169
171
  base_url=base_url,
172
+ timeout=timeout
170
173
  )
171
174
  upload_traces.upload_agentic_traces()
172
175
  logger.info("Agentic traces uploaded successfully")
@@ -178,14 +181,15 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
178
181
 
179
182
  # Step 4: Upload code hash
180
183
  if hash_id and zip_path and os.path.exists(zip_path):
181
- logger.info(f"Uploading code hash {hash_id}")
184
+ logger.info(f"Uploading code hash {hash_id} with base_url: {base_url} and timeout: {timeout}")
182
185
  try:
183
186
  response = upload_code(
184
187
  hash_id=hash_id,
185
188
  zip_path=zip_path,
186
189
  project_name=project_name,
187
190
  dataset_name=dataset_name,
188
- base_url=base_url
191
+ base_url=base_url,
192
+ timeout=timeout
189
193
  )
190
194
  logger.info(f"Code hash uploaded: {response}")
191
195
  except Exception as e:
@@ -215,7 +219,7 @@ def save_task_status(task_status: Dict[str, Any]):
215
219
  with open(status_path, "w") as f:
216
220
  json.dump(task_status, f, indent=2)
217
221
 
218
- def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
222
+ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url, timeout=120):
219
223
  """
220
224
  Submit a new upload task using futures.
221
225
 
@@ -259,7 +263,8 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
259
263
  project_id=project_id,
260
264
  dataset_name=dataset_name,
261
265
  user_details=user_details,
262
- base_url=base_url
266
+ base_url=base_url,
267
+ timeout=timeout
263
268
  )
264
269
 
265
270
  # Store the future for later status checks
@@ -17,14 +17,16 @@ class UploadAgenticTraces:
17
17
  project_id,
18
18
  dataset_name,
19
19
  user_detail,
20
- base_url):
20
+ base_url,
21
+ timeout=120,
22
+ ):
21
23
  self.json_file_path = json_file_path
22
24
  self.project_name = project_name
23
25
  self.project_id = project_id
24
26
  self.dataset_name = dataset_name
25
27
  self.user_detail = user_detail
26
28
  self.base_url = base_url
27
- self.timeout = 30
29
+ self.timeout = timeout
28
30
 
29
31
 
30
32
  def _get_presigned_url(self):
@@ -9,19 +9,19 @@ logger = logging.getLogger(__name__)
9
9
  from urllib.parse import urlparse, urlunparse
10
10
  import re
11
11
 
12
- def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None):
13
- code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url)
12
+ def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None, timeout=120):
13
+ code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url, timeout=timeout)
14
14
 
15
15
  if hash_id not in code_hashes_list:
16
- presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url)
17
- _put_zip_presigned_url(project_name, presigned_url, zip_path)
16
+ presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url, timeout=timeout)
17
+ _put_zip_presigned_url(project_name, presigned_url, zip_path, timeout=timeout)
18
18
 
19
- response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url)
19
+ response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url, timeout=timeout)
20
20
  return response
21
21
  else:
22
22
  return "Code already exists"
23
23
 
24
- def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
24
+ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None, timeout=120):
25
25
  payload = {}
26
26
  headers = {
27
27
  "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
@@ -36,7 +36,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
36
36
  endpoint,
37
37
  headers=headers,
38
38
  data=payload,
39
- timeout=99999)
39
+ timeout=timeout)
40
40
  elapsed_ms = (time.time() - start_time) * 1000
41
41
  logger.debug(
42
42
  f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -66,7 +66,7 @@ def update_presigned_url(presigned_url, base_url):
66
66
  return presigned_url
67
67
 
68
68
 
69
- def _fetch_presigned_url(project_name, dataset_name, base_url=None):
69
+ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120):
70
70
  payload = json.dumps({
71
71
  "datasetName": dataset_name,
72
72
  "numFiles": 1,
@@ -87,7 +87,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None):
87
87
  endpoint,
88
88
  headers=headers,
89
89
  data=payload,
90
- timeout=99999)
90
+ timeout=timeout)
91
91
  elapsed_ms = (time.time() - start_time) * 1000
92
92
  logger.debug(
93
93
  f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -102,7 +102,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None):
102
102
  logger.error(f"Failed to list datasets: {e}")
103
103
  raise
104
104
 
105
- def _put_zip_presigned_url(project_name, presignedUrl, filename):
105
+ def _put_zip_presigned_url(project_name, presignedUrl, filename, timeout=120):
106
106
  headers = {
107
107
  "X-Project-Name": project_name,
108
108
  "Content-Type": "application/zip",
@@ -119,14 +119,14 @@ def _put_zip_presigned_url(project_name, presignedUrl, filename):
119
119
  presignedUrl,
120
120
  headers=headers,
121
121
  data=payload,
122
- timeout=99999)
122
+ timeout=timeout)
123
123
  elapsed_ms = (time.time() - start_time) * 1000
124
124
  logger.debug(
125
125
  f"API Call: [PUT] {presignedUrl} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
126
126
  if response.status_code != 200 or response.status_code != 201:
127
127
  return response, response.status_code
128
128
 
129
- def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None):
129
+ def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None, timeout=120):
130
130
  payload = json.dumps({
131
131
  "datasetName": dataset_name,
132
132
  "codeHash": hash_id,
@@ -147,7 +147,7 @@ def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=No
147
147
  endpoint,
148
148
  headers=headers,
149
149
  data=payload,
150
- timeout=99999)
150
+ timeout=timeout)
151
151
  elapsed_ms = (time.time() - start_time) * 1000
152
152
  logger.debug(
153
153
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -15,7 +15,7 @@ logging_level = (
15
15
  )
16
16
 
17
17
 
18
- def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None):
18
+ def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None, timeout=120):
19
19
  try:
20
20
  with open(json_file_path, "r") as f:
21
21
  traces = json.load(f)
@@ -51,7 +51,7 @@ def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=Non
51
51
  endpoint,
52
52
  headers=headers,
53
53
  data=payload,
54
- timeout=10)
54
+ timeout=timeout)
55
55
  elapsed_ms = (time.time() - start_time) * 1000
56
56
  logger.debug(
57
57
  f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -4,7 +4,7 @@ import re
4
4
  import requests
5
5
  from ragaai_catalyst.tracers.agentic_tracing.tracers.base import RagaAICatalyst
6
6
 
7
- def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None):
7
+ def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, timeout=120):
8
8
  def make_request():
9
9
  headers = {
10
10
  "Content-Type": "application/json",
@@ -21,7 +21,7 @@ def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None):
21
21
  f"{url_base}/v1/llm/dataset/logs",
22
22
  headers=headers,
23
23
  data=payload,
24
- timeout=10
24
+ timeout=timeout
25
25
  )
26
26
  return response
27
27
  response = make_request()
@@ -14,7 +14,7 @@ class DynamicTraceExporter(SpanExporter):
14
14
  certain properties to be updated dynamically during execution.
15
15
  """
16
16
 
17
- def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
17
+ def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120):
18
18
  """
19
19
  Initialize the DynamicTraceExporter.
20
20
 
@@ -33,7 +33,8 @@ class DynamicTraceExporter(SpanExporter):
33
33
  dataset_name=dataset_name,
34
34
  user_details=user_details,
35
35
  base_url=base_url,
36
- custom_model_cost=custom_model_cost
36
+ custom_model_cost=custom_model_cost,
37
+ timeout=timeout
37
38
  )
38
39
 
39
40
  # Store the initial values
@@ -19,7 +19,7 @@ logging_level = (
19
19
 
20
20
 
21
21
  class RAGATraceExporter(SpanExporter):
22
- def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
22
+ def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120):
23
23
  self.trace_spans = dict()
24
24
  self.tmp_dir = tempfile.gettempdir()
25
25
  self.files_to_zip = files_to_zip
@@ -30,6 +30,7 @@ class RAGATraceExporter(SpanExporter):
30
30
  self.base_url = base_url
31
31
  self.custom_model_cost = custom_model_cost
32
32
  self.system_monitor = SystemMonitor(dataset_name)
33
+ self.timeout = timeout
33
34
 
34
35
  def export(self, spans):
35
36
  for span in spans:
@@ -122,7 +123,8 @@ class RAGATraceExporter(SpanExporter):
122
123
  project_id=self.project_id,
123
124
  dataset_name=self.dataset_name,
124
125
  user_details=self.user_details,
125
- base_url=self.base_url
126
+ base_url=self.base_url,
127
+ timeout=self.timeout
126
128
  )
127
129
 
128
130
  logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
@@ -37,7 +37,6 @@ logging_level = (
37
37
 
38
38
  class Tracer(AgenticTracing):
39
39
  NUM_PROJECTS = 99999
40
- TIMEOUT = 10
41
40
  def __init__(
42
41
  self,
43
42
  project_name,
@@ -47,7 +46,7 @@ class Tracer(AgenticTracing):
47
46
  pipeline=None,
48
47
  metadata=None,
49
48
  description=None,
50
- upload_timeout=30, # Default timeout of 30 seconds
49
+ timeout=120, # Default timeout of 120 seconds
51
50
  update_llm_cost=True, # Parameter to control model cost updates
52
51
  auto_instrumentation={ # to control automatic instrumentation of different components
53
52
  'llm':True,
@@ -72,7 +71,7 @@ class Tracer(AgenticTracing):
72
71
  pipeline (dict, optional): The pipeline configuration. Defaults to None.
73
72
  metadata (dict, optional): The metadata. Defaults to None.
74
73
  description (str, optional): The description. Defaults to None.
75
- upload_timeout (int, optional): The upload timeout in seconds. Defaults to 30.
74
 + timeout (int, optional): The upload timeout in seconds. Defaults to 120.
76
75
  update_llm_cost (bool, optional): Whether to update model costs from GitHub. Defaults to True.
77
76
  """
78
77
 
@@ -133,9 +132,8 @@ class Tracer(AgenticTracing):
133
132
  # self.metadata["total_tokens"] = 0
134
133
  self.pipeline = pipeline
135
134
  self.description = description
136
- self.upload_timeout = upload_timeout
135
+ self.timeout = timeout
137
136
  self.base_url = f"{RagaAICatalyst.BASE_URL}"
138
- self.timeout = 30
139
137
  self.num_projects = 99999
140
138
  self.start_time = datetime.datetime.now().astimezone().isoformat()
141
139
  self.model_cost_dict = model_cost
@@ -399,7 +397,7 @@ class Tracer(AgenticTracing):
399
397
  'pipeline': self.pipeline,
400
398
  'metadata': self.metadata,
401
399
  'description': self.description,
402
- 'upload_timeout': self.upload_timeout
400
+ 'timeout': self.timeout
403
401
  }
404
402
 
405
403
  # Reinitialize self with new dataset_name and stored parameters
@@ -489,7 +487,10 @@ class Tracer(AgenticTracing):
489
487
  # Add cost if possible
490
488
  if additional_metadata.get('model_name'):
491
489
  try:
492
- model_cost_data = self.model_cost_dict[additional_metadata['model_name']]
490
+ if self.model_custom_cost.get(additional_metadata['model_name']):
491
+ model_cost_data = self.model_custom_cost[additional_metadata['model_name']]
492
+ else:
493
+ model_cost_data = self.model_cost_dict[additional_metadata['model_name']]
493
494
  if 'tokens' in additional_metadata and all(k in additional_metadata['tokens'] for k in ['prompt', 'completion']):
494
495
  prompt_cost = additional_metadata["tokens"]["prompt"]*model_cost_data["input_cost_per_token"]
495
496
  completion_cost = additional_metadata["tokens"]["completion"]*model_cost_data["output_cost_per_token"]
@@ -615,11 +616,11 @@ class Tracer(AgenticTracing):
615
616
 
616
617
  This function uploads the traces generated by the RagaAICatalyst client to the RagaAICatalyst server. It uses the `aiohttp` library to make an asynchronous HTTP request to the server. The function first checks if the `RAGAAI_CATALYST_TOKEN` environment variable is set. If not, it raises a `ValueError` with the message "RAGAAI_CATALYST_TOKEN not found. Cannot upload traces.".
617
618
 
618
- The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `upload_timeout` attribute of the `Tracer` object.
619
+ The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `timeout` attribute of the `Tracer` object.
619
620
 
620
621
  If the upload is successful, the function returns the string "Files uploaded successfully" if the `upload_stat` variable is truthy, otherwise it returns the string "No files to upload".
621
622
 
622
- If the upload times out, the function returns a string with the message "Upload timed out after {self.upload_timeout} seconds".
623
+ If the upload times out, the function returns a string with the message "Upload timed out after {self.timeout} seconds".
623
624
 
624
625
  If any other exception occurs during the upload, the function returns a string with the message "Upload failed: {str(e)}", where `{str(e)}` is the string representation of the exception.
625
626
 
@@ -641,7 +642,7 @@ class Tracer(AgenticTracing):
641
642
  session=session,
642
643
  file_paths=[self.filespanx.sync_file],
643
644
  ),
644
- timeout=self.upload_timeout,
645
+ timeout=self.timeout,
645
646
  )
646
647
  return (
647
648
  "Files uploaded successfully"
@@ -649,7 +650,7 @@ class Tracer(AgenticTracing):
649
650
  else "No files to upload"
650
651
  )
651
652
  except asyncio.TimeoutError:
652
- return f"Upload timed out after {self.upload_timeout} seconds"
653
+ return f"Upload timed out after {self.timeout} seconds"
653
654
  except Exception as e:
654
655
  return f"Upload failed: {str(e)}"
655
656
 
@@ -749,7 +750,8 @@ class Tracer(AgenticTracing):
749
750
  dataset_name=self.dataset_name,
750
751
  user_details=self.user_details,
751
752
  base_url=self.base_url,
752
- custom_model_cost=self.model_custom_cost
753
+ custom_model_cost=self.model_custom_cost,
754
+ timeout=self.timeout
753
755
  )
754
756
 
755
757
  # Set up tracer provider
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ragaai_catalyst
3
- Version: 2.1.6.2
3
+ Version: 2.1.6.3
4
4
  Summary: RAGA AI CATALYST
5
5
  Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
6
6
  Requires-Python: <3.13,>=3.10
@@ -31,7 +31,7 @@ ragaai_catalyst/tracers/distributed.py,sha256=MwlBwIxCAng-OI-7Ove_rkE1mTLeuW4Jw-
31
31
  ragaai_catalyst/tracers/langchain_callback.py,sha256=CB75zzG3-DkYTELj0vI1MOHQTY0MuQJfoHIXz9Cl8S8,34568
32
32
  ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
33
33
  ragaai_catalyst/tracers/llamaindex_instrumentation.py,sha256=Ys_jLkvVqo12bKgXDmkp4TxJu9HkBATrFE8cIcTYxWw,14329
34
- ragaai_catalyst/tracers/tracer.py,sha256=juMsA9qkKffoSWYx3a-iTNCY6SnKxjMoYvTSGQ-r-Gk,37080
34
+ ragaai_catalyst/tracers/tracer.py,sha256=mfh0pzbivv2Aj9yUiC92eplmCzBS4EzfS0H2TS5NZyc,37219
35
35
  ragaai_catalyst/tracers/upload_traces.py,sha256=nqH6Ldng33VhEGcvQberyWKZ1WvLuBRoySEit8b0f7s,5882
36
36
  ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
37
37
  ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
@@ -45,23 +45,23 @@ ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py,sha256=S4rCcKzU
45
45
  ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py,sha256=Xk1cLzs-2A3dgyBwRRnCWs7Eubki40FVonwd433hPN8,4805
46
46
  ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
47
47
  ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py,sha256=LzbsHvELwBmH8ObFomJRhiQ98b6MEi18irm0DPiplt0,29743
48
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=Kmy1kgwy19e7MuMMq9GPUq9VXpJV2bXeaIhx8UxX5Sc,54251
48
+ ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=5O8GKv5U_L3biLpsk5HEW21yiKWCJ8H4ijYjHrTzcW4,54350
49
49
  ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py,sha256=OBJJjFSvwRjCGNJyqX3yIfC1W05ZN2QUXasCJ4gmCjQ,13930
50
50
  ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
51
51
  ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=z-qzmCQCkhyW0aLDUR_rNq4pmxhAaVhNY-kZQsox-Ws,50221
52
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=aiFobQb5ePPhyRADXJTZgI8_PrSGhjXnOu9W_o3ngEA,16148
52
+ ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=9Kn6gppITCJ8obLyAGgfpWOlyqKfx2Zd4RkkMZMHYn8,16166
53
53
  ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py,sha256=m8CxYkl7iMiFya_lNwN1ykBc3Pmo-2pR_2HmpptwHWQ,10352
54
54
  ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=xxrliKPfdfbIZRZqMnUewsaTD8_Hv0dbuoBivNZGD4U,21674
55
55
  ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py,sha256=bhSUhNQCuJXKjgJAXhjKEYjnHMpYN90FSZdR84fNIKU,4614
56
56
  ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
57
- ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=UnGpcMpRbntUrYsIU11r-gMHtzNkDGSGCbepiL_XTFA,12379
58
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=icycLgfA0734xxoM1rTMG_iIrI3iM94th8RQggJ7sSw,8541
59
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=aw_eHhUYRbR_9IbIkNjYb7NOsmETD3k1p4a6gxaGI7Q,6462
57
+ ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=w1taoTdOB2Lhrmi90fenyIHqUqYvWdNS_C6pZqGbd5M,12743
58
+ ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=9s1cXH1rZ2EMPMsJ7zls8BRVVHdyGNXK-SoIF7P2MxM,8595
59
+ ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=bRJGqdLPm0fTORAdUGRo0aDPiZeqB30gK_iC7SsymL4,6603
60
60
  ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py,sha256=m1O8lKpxKwtHofXLW3fTHX5yfqDW5GxoveARlg5cTw4,2571
61
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py,sha256=dmgcFZtCUpTGV2GayKQ9znaFRDLlXY8NW6RiyNwerOk,4278
61
+ ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py,sha256=hRuh-cczHbeM_Spbf9HTYd149uSs1zP0TvkYuZKF4ec,4296
62
62
  ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=XdB3X_ufe4RVvGorxSqAiB9dYv4UD7Hvvuw3bsDUppY,60
63
63
  ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py,sha256=JyNCbfpW-w4O9CjtemTqmor2Rh1WGpQwhRaDSRmBxw8,689
64
- ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=xHTe7YiCmCY7kRxe0kB7wwo_ueT1UB_hnAA36R2m-pQ,941
64
+ ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=TQ69gOp1UDV16Icp7uhWKXkadPsOnZgkHRgNvsi7jlg,959
65
65
  ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py,sha256=YG601l1a29ov9VPu9Vl4RXxgL7l16k54_WWnoTNoG58,2064
66
66
  ragaai_catalyst/tracers/agentic_tracing/utils/generic.py,sha256=WwXT01xmp8MSr7KinuDCSK9a1ifpLcT7ajFkvYviG_A,1190
67
67
  ragaai_catalyst/tracers/agentic_tracing/utils/get_user_trace_metrics.py,sha256=vPZ4dn4EHFW0kqd1GyRpsYXbfrRrd0DXCmh-pzsDBNE,1109
@@ -74,10 +74,10 @@ ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=rssHolDvKxZ9
74
74
  ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=G027toV-Km20JjKrc-Y_PilQ8ABEKrBvvzgLTnqVg7I,5819
75
75
  ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=4TeCGsFF26249fV6dJHLTZDrRa93SG9oer4rudoF8Y4,19443
76
76
  ragaai_catalyst/tracers/exporters/__init__.py,sha256=wQbaqyeIjVZxYprHCKZ9BeiqxeXYBKjzEgP79LWNxCU,293
77
- ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=w9U8UTxvTbGTDUoMtsgy2BsdpYp-APTKFdGV4o5JPaM,5051
77
+ ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=oFIubzCCmC3fpoH_ybohevjOE5WIEG9ahcPIEtwn99c,5093
78
78
  ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=RgGteu-NVGprXKkynvyIO5yOjpbtA41R3W_NzCjnkwE,6445
79
79
  ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=6xvjWXyh8XPkHKSLLmAZUQSvwuyY17ov8pv2VdfI0qA,17875
80
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=HZG1UjcipgQOHkeqQHVGxenIab2mHqcVmWqtOXlMt6Q,5305
80
+ ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=n78Yky-32yVI8fiL_3yD-iShgizb8IcN8OqQ5gOwZ9w,5387
81
81
  ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
82
82
  ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
83
83
  ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
@@ -87,8 +87,8 @@ ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=XS2_x2
87
87
  ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256=C3uwkibJ08C9sOX-54kulZYmJlIpZ-SQpfE6HNGrjbM,343502
88
88
  ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=RH9dkCnPaSygvPvAilRE4lUdUaRtALJKH85E4jHcVoM,14072
89
89
  ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
90
- ragaai_catalyst-2.1.6.2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
91
- ragaai_catalyst-2.1.6.2.dist-info/METADATA,sha256=yVT5tnmPWjT9KjQBeGsZ1-M5WkefYDDkk4871QZ_oGE,22139
92
- ragaai_catalyst-2.1.6.2.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
93
- ragaai_catalyst-2.1.6.2.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
94
- ragaai_catalyst-2.1.6.2.dist-info/RECORD,,
90
+ ragaai_catalyst-2.1.6.3.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
91
+ ragaai_catalyst-2.1.6.3.dist-info/METADATA,sha256=xMzt242kGCFNMAro9sWkSenY09ZHOjPadqkPIIEhJzE,22139
92
+ ragaai_catalyst-2.1.6.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
93
+ ragaai_catalyst-2.1.6.3.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
94
+ ragaai_catalyst-2.1.6.3.dist-info/RECORD,,