ragaai-catalyst 2.1.6.1__py3-none-any.whl → 2.1.6.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +1 -3
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +2 -2
- ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +5 -9
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +1 -1
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +13 -13
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py +2 -2
- ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +2 -3
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +2 -4
- ragaai_catalyst/tracers/tracer.py +11 -10
- {ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/METADATA +1 -1
- {ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/RECORD +14 -14
- {ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/licenses/LICENSE +0 -0
- {ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/top_level.txt +0 -0
ragaai_catalyst/tracers/agentic_tracing/tracers/base.py

@@ -92,7 +92,6 @@ class BaseTracer:
         self._upload_tasks = []
         self._is_uploading = False
         self._upload_completed_callback = None
-        self.timeout = self.user_details.get("timeout", 120)

         ensure_uploader_running()

@@ -315,8 +314,7 @@ class BaseTracer:
            project_id=self.project_id,
            dataset_name=self.dataset_name,
            user_details=self.user_details,
-           base_url=self.base_url,
-           timeout=self.timeout
+           base_url=self.base_url
        )

        # For backward compatibility
ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py

@@ -48,7 +48,7 @@ from ragaai_catalyst.tracers.upload_traces import UploadTraces
 class AgenticTracing(
     BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin, CustomTracerMixin
 ):
-    def __init__(self, user_detail, auto_instrumentation=None
+    def __init__(self, user_detail, auto_instrumentation=None):
         # Initialize all parent classes
         self.user_interaction_tracer = UserInteractionTracer()
         LLMTracerMixin.__init__(self)
@@ -60,7 +60,7 @@ class AgenticTracing(
         self.project_id = user_detail["project_id"]
         self.trace_user_detail = user_detail["trace_user_detail"]
         self.base_url = f"{RagaAICatalyst.BASE_URL}"
-        self.timeout =
+        self.timeout = 10

         # Add warning flag
         self._warning_shown = False
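With this change, `AgenticTracing` no longer accepts a timeout argument and instead fixes `self.timeout = 10`. A minimal, hedged sketch of constructing it under the new signature follows; everything in the `user_detail` dict beyond the `project_id` and `trace_user_detail` keys read above is a hypothetical placeholder.

from ragaai_catalyst.tracers.agentic_tracing.tracers.main_tracer import AgenticTracing

# Sketch only: placeholder values for illustration, not a documented payload.
user_detail = {
    "project_id": "1234",                                    # hypothetical project id
    "trace_user_detail": {"dataset_name": "demo-dataset"},   # hypothetical payload
}

# No timeout argument any more; the instance uses the hardcoded 10-second value.
tracing = AgenticTracing(user_detail)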
ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py

@@ -77,7 +77,7 @@ def get_executor():

 def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                    project_name: str, project_id: str, dataset_name: str,
-                   user_details: Dict[str, Any], base_url: str
+                   user_details: Dict[str, Any], base_url: str) -> Dict[str, Any]:
     """
     Process a single upload task

@@ -147,8 +147,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                json_file_path=filepath,
                dataset_name=dataset_name,
                project_name=project_name,
-               base_url=base_url,
-               timeout=timeout
+               base_url=base_url
            )
            logger.info(f"Trace metrics uploaded: {response}")
        except Exception as e:
@@ -168,7 +167,6 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                dataset_name=dataset_name,
                user_detail=user_details,
                base_url=base_url,
-               timeout=timeout
            )
            upload_traces.upload_agentic_traces()
            logger.info("Agentic traces uploaded successfully")
@@ -187,8 +185,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                zip_path=zip_path,
                project_name=project_name,
                dataset_name=dataset_name,
-               base_url=base_url,
-               timeout=timeout
+               base_url=base_url
            )
            logger.info(f"Code hash uploaded: {response}")
        except Exception as e:
@@ -218,7 +215,7 @@ def save_task_status(task_status: Dict[str, Any]):
     with open(status_path, "w") as f:
         json.dump(task_status, f, indent=2)

-def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url
+def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
     """
     Submit a new upload task using futures.

@@ -262,8 +259,7 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, da
         project_id=project_id,
         dataset_name=dataset_name,
         user_details=user_details,
-        base_url=base_url,
-        timeout=timeout
+        base_url=base_url
     )

     # Store the future for later status checks
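For callers, the net effect in `trace_uploader.py` is that `submit_upload_task` is now invoked without a `timeout` keyword. A hedged sketch of a call under the new signature; the paths, ids, names, and endpoint are hypothetical placeholders, and the `user_details` shape is an assumption.

from ragaai_catalyst.tracers.agentic_tracing.upload.trace_uploader import submit_upload_task

# Sketch only: all values below are hypothetical placeholders.
task_id = submit_upload_task(
    filepath="/tmp/trace_0001.json",      # serialized trace JSON
    hash_id="abc123",                     # hash of the zipped source files
    zip_path="/tmp/code_abc123.zip",      # zip produced by the tracer
    project_name="demo-project",
    project_id="42",
    dataset_name="demo-dataset",
    user_details={},                      # assumed shape; built by the tracer in practice
    base_url="https://example.com/api",   # placeholder endpoint
)
print(task_id)  # the returned id is used for later status checks, as the surrounding code suggests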
ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py

@@ -9,19 +9,19 @@ logger = logging.getLogger(__name__)
 from urllib.parse import urlparse, urlunparse
 import re

-def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None
-    code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url
+def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None):
+    code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url)

     if hash_id not in code_hashes_list:
-        presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url
-        _put_zip_presigned_url(project_name, presigned_url, zip_path
+        presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url)
+        _put_zip_presigned_url(project_name, presigned_url, zip_path)

-        response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url
+        response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url)
         return response
     else:
         return "Code already exists"

-def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None
+def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
     payload = {}
     headers = {
         "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
@@ -36,7 +36,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None, timeou
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=99999)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -66,7 +66,7 @@ def update_presigned_url(presigned_url, base_url):
     return presigned_url


-def _fetch_presigned_url(project_name, dataset_name, base_url=None
+def _fetch_presigned_url(project_name, dataset_name, base_url=None):
     payload = json.dumps({
         "datasetName": dataset_name,
         "numFiles": 1,
@@ -87,7 +87,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=99999)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -102,7 +102,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
         logger.error(f"Failed to list datasets: {e}")
         raise

-def _put_zip_presigned_url(project_name, presignedUrl, filename
+def _put_zip_presigned_url(project_name, presignedUrl, filename):
     headers = {
         "X-Project-Name": project_name,
         "Content-Type": "application/zip",
@@ -119,14 +119,14 @@ def _put_zip_presigned_url(project_name, presignedUrl, filename, timeout=120):
             presignedUrl,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=99999)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [PUT] {presignedUrl} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
         if response.status_code != 200 or response.status_code != 201:
             return response, response.status_code

-def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None
+def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None):
     payload = json.dumps({
         "datasetName": dataset_name,
         "codeHash": hash_id,
@@ -147,7 +147,7 @@ def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=No
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=99999)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
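The `upload_code` helpers keep their behaviour but lose the explicit timeout parameter; the underlying `requests` calls now pass a hardcoded `timeout=99999`. A minimal, hedged sketch of calling the public entry point with the new signature; the hash, zip path, and names are placeholders, and `RAGAAI_CATALYST_TOKEN` must be set in the environment, as the request headers above show.

from ragaai_catalyst.tracers.agentic_tracing.upload.upload_code import upload_code

# Sketch only: hash, zip path, and names are hypothetical placeholders.
response = upload_code(
    hash_id="abc123",                 # hash of the zipped source files
    zip_path="/tmp/code_abc123.zip",  # zip of the traced source files
    project_name="demo-project",
    dataset_name="demo-dataset",
)  # base_url defaults to None; a timeout can no longer be passed
print(response)  # upload response, or "Code already exists" if the hash was seen before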
ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py

@@ -15,7 +15,7 @@ logging_level = (
 )


-def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None
+def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None):
     try:
         with open(json_file_path, "r") as f:
             traces = json.load(f)
@@ -51,7 +51,7 @@ def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=Non
             endpoint,
             headers=headers,
             data=payload,
-            timeout=
+            timeout=10)
         elapsed_ms = (time.time() - start_time) * 1000
         logger.debug(
             f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py

@@ -14,7 +14,7 @@ class DynamicTraceExporter(SpanExporter):
     certain properties to be updated dynamically during execution.
     """

-    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost
+    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
         """
         Initialize the DynamicTraceExporter.

@@ -33,8 +33,7 @@ class DynamicTraceExporter(SpanExporter):
            dataset_name=dataset_name,
            user_details=user_details,
            base_url=base_url,
-           custom_model_cost=custom_model_cost,
-           timeout=timeout
+           custom_model_cost=custom_model_cost
        )

        # Store the initial values
ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py

@@ -19,7 +19,7 @@ logging_level = (


 class RAGATraceExporter(SpanExporter):
-    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost
+    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
         self.trace_spans = dict()
         self.tmp_dir = tempfile.gettempdir()
         self.files_to_zip = files_to_zip
@@ -30,7 +30,6 @@ class RAGATraceExporter(SpanExporter):
         self.base_url = base_url
         self.custom_model_cost = custom_model_cost
         self.system_monitor = SystemMonitor(dataset_name)
-        self.timeout = timeout

     def export(self, spans):
         for span in spans:
@@ -123,8 +122,7 @@ class RAGATraceExporter(SpanExporter):
            project_id=self.project_id,
            dataset_name=self.dataset_name,
            user_details=self.user_details,
-           base_url=self.base_url,
-           timeout=self.timeout
+           base_url=self.base_url
        )

        logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
ragaai_catalyst/tracers/tracer.py

@@ -37,6 +37,7 @@ logging_level = (

 class Tracer(AgenticTracing):
     NUM_PROJECTS = 99999
+    TIMEOUT = 10
     def __init__(
         self,
         project_name,
@@ -46,7 +47,7 @@ class Tracer(AgenticTracing):
         pipeline=None,
         metadata=None,
         description=None,
-
+        upload_timeout=30, # Default timeout of 30 seconds
         update_llm_cost=True, # Parameter to control model cost updates
         auto_instrumentation={ # to control automatic instrumentation of different components
             'llm':True,
@@ -71,7 +72,7 @@ class Tracer(AgenticTracing):
            pipeline (dict, optional): The pipeline configuration. Defaults to None.
            metadata (dict, optional): The metadata. Defaults to None.
            description (str, optional): The description. Defaults to None.
-
+           upload_timeout (int, optional): The upload timeout in seconds. Defaults to 30.
            update_llm_cost (bool, optional): Whether to update model costs from GitHub. Defaults to True.
            """

@@ -132,8 +133,9 @@ class Tracer(AgenticTracing):
        # self.metadata["total_tokens"] = 0
        self.pipeline = pipeline
        self.description = description
-       self.
+       self.upload_timeout = upload_timeout
        self.base_url = f"{RagaAICatalyst.BASE_URL}"
+       self.timeout = 30
        self.num_projects = 99999
        self.start_time = datetime.datetime.now().astimezone().isoformat()
        self.model_cost_dict = model_cost
@@ -397,7 +399,7 @@ class Tracer(AgenticTracing):
            'pipeline': self.pipeline,
            'metadata': self.metadata,
            'description': self.description,
-           '
+           'upload_timeout': self.upload_timeout
        }

        # Reinitialize self with new dataset_name and stored parameters
@@ -613,11 +615,11 @@ class Tracer(AgenticTracing):

        This function uploads the traces generated by the RagaAICatalyst client to the RagaAICatalyst server. It uses the `aiohttp` library to make an asynchronous HTTP request to the server. The function first checks if the `RAGAAI_CATALYST_TOKEN` environment variable is set. If not, it raises a `ValueError` with the message "RAGAAI_CATALYST_TOKEN not found. Cannot upload traces.".

-       The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `
+       The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `upload_timeout` attribute of the `Tracer` object.

        If the upload is successful, the function returns the string "Files uploaded successfully" if the `upload_stat` variable is truthy, otherwise it returns the string "No files to upload".

-       If the upload times out, the function returns a string with the message "Upload timed out after {self.
+       If the upload times out, the function returns a string with the message "Upload timed out after {self.upload_timeout} seconds".

        If any other exception occurs during the upload, the function returns a string with the message "Upload failed: {str(e)}", where `{str(e)}` is the string representation of the exception.

@@ -639,7 +641,7 @@ class Tracer(AgenticTracing):
                        session=session,
                        file_paths=[self.filespanx.sync_file],
                    ),
-                   timeout=self.
+                   timeout=self.upload_timeout,
                )
                return (
                    "Files uploaded successfully"
@@ -647,7 +649,7 @@ class Tracer(AgenticTracing):
                    else "No files to upload"
                )
            except asyncio.TimeoutError:
-               return f"Upload timed out after {self.
+               return f"Upload timed out after {self.upload_timeout} seconds"
            except Exception as e:
                return f"Upload failed: {str(e)}"

@@ -747,8 +749,7 @@ class Tracer(AgenticTracing):
            dataset_name=self.dataset_name,
            user_details=self.user_details,
            base_url=self.base_url,
-           custom_model_cost=self.model_custom_cost,
-           timeout=self.timeout
+           custom_model_cost=self.model_custom_cost
        )

        # Set up tracer provider
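The user-visible change in tracer.py is the restored `upload_timeout` constructor argument (default 30 seconds), which now bounds the `asyncio.wait_for` call shown above, while the internal `self.timeout` is pinned to 30. A hedged sketch of passing an explicit upload timeout; the project and dataset names are placeholders, the top-level `Tracer` import is assumed, and all other arguments keep their defaults.

from ragaai_catalyst import Tracer  # assumed top-level export

# Sketch only: names are placeholders.
tracer = Tracer(
    project_name="demo-project",
    dataset_name="demo-dataset",
    upload_timeout=60,  # give slow uploads more than the 30-second default
)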
{ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.1.6.1
+Version: 2.1.6.2
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
 Requires-Python: <3.13,>=3.10
{ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/RECORD

@@ -31,7 +31,7 @@ ragaai_catalyst/tracers/distributed.py,sha256=MwlBwIxCAng-OI-7Ove_rkE1mTLeuW4Jw-
 ragaai_catalyst/tracers/langchain_callback.py,sha256=CB75zzG3-DkYTELj0vI1MOHQTY0MuQJfoHIXz9Cl8S8,34568
 ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
 ragaai_catalyst/tracers/llamaindex_instrumentation.py,sha256=Ys_jLkvVqo12bKgXDmkp4TxJu9HkBATrFE8cIcTYxWw,14329
-ragaai_catalyst/tracers/tracer.py,sha256=
+ragaai_catalyst/tracers/tracer.py,sha256=juMsA9qkKffoSWYx3a-iTNCY6SnKxjMoYvTSGQ-r-Gk,37080
 ragaai_catalyst/tracers/upload_traces.py,sha256=nqH6Ldng33VhEGcvQberyWKZ1WvLuBRoySEit8b0f7s,5882
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
@@ -45,20 +45,20 @@ ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py,sha256=S4rCcKzU
 ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py,sha256=Xk1cLzs-2A3dgyBwRRnCWs7Eubki40FVonwd433hPN8,4805
 ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py,sha256=LzbsHvELwBmH8ObFomJRhiQ98b6MEi18irm0DPiplt0,29743
-ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=Kmy1kgwy19e7MuMMq9GPUq9VXpJV2bXeaIhx8UxX5Sc,54251
 ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py,sha256=OBJJjFSvwRjCGNJyqX3yIfC1W05ZN2QUXasCJ4gmCjQ,13930
 ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=z-qzmCQCkhyW0aLDUR_rNq4pmxhAaVhNY-kZQsox-Ws,50221
-ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=aiFobQb5ePPhyRADXJTZgI8_PrSGhjXnOu9W_o3ngEA,16148
 ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py,sha256=m8CxYkl7iMiFya_lNwN1ykBc3Pmo-2pR_2HmpptwHWQ,10352
 ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=xxrliKPfdfbIZRZqMnUewsaTD8_Hv0dbuoBivNZGD4U,21674
 ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py,sha256=bhSUhNQCuJXKjgJAXhjKEYjnHMpYN90FSZdR84fNIKU,4614
 ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=UnGpcMpRbntUrYsIU11r-gMHtzNkDGSGCbepiL_XTFA,12379
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=icycLgfA0734xxoM1rTMG_iIrI3iM94th8RQggJ7sSw,8541
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=aw_eHhUYRbR_9IbIkNjYb7NOsmETD3k1p4a6gxaGI7Q,6462
 ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py,sha256=m1O8lKpxKwtHofXLW3fTHX5yfqDW5GxoveARlg5cTw4,2571
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py,sha256=dmgcFZtCUpTGV2GayKQ9znaFRDLlXY8NW6RiyNwerOk,4278
 ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=XdB3X_ufe4RVvGorxSqAiB9dYv4UD7Hvvuw3bsDUppY,60
 ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py,sha256=JyNCbfpW-w4O9CjtemTqmor2Rh1WGpQwhRaDSRmBxw8,689
 ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=xHTe7YiCmCY7kRxe0kB7wwo_ueT1UB_hnAA36R2m-pQ,941
@@ -74,10 +74,10 @@ ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=rssHolDvKxZ9
 ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=G027toV-Km20JjKrc-Y_PilQ8ABEKrBvvzgLTnqVg7I,5819
 ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=4TeCGsFF26249fV6dJHLTZDrRa93SG9oer4rudoF8Y4,19443
 ragaai_catalyst/tracers/exporters/__init__.py,sha256=wQbaqyeIjVZxYprHCKZ9BeiqxeXYBKjzEgP79LWNxCU,293
-ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=
+ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=w9U8UTxvTbGTDUoMtsgy2BsdpYp-APTKFdGV4o5JPaM,5051
 ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=RgGteu-NVGprXKkynvyIO5yOjpbtA41R3W_NzCjnkwE,6445
 ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=6xvjWXyh8XPkHKSLLmAZUQSvwuyY17ov8pv2VdfI0qA,17875
-ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=
+ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=HZG1UjcipgQOHkeqQHVGxenIab2mHqcVmWqtOXlMt6Q,5305
 ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
 ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
@@ -87,8 +87,8 @@ ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=XS2_x2
 ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256=C3uwkibJ08C9sOX-54kulZYmJlIpZ-SQpfE6HNGrjbM,343502
 ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=RH9dkCnPaSygvPvAilRE4lUdUaRtALJKH85E4jHcVoM,14072
 ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
-ragaai_catalyst-2.1.6.
-ragaai_catalyst-2.1.6.
-ragaai_catalyst-2.1.6.
-ragaai_catalyst-2.1.6.
-ragaai_catalyst-2.1.6.
+ragaai_catalyst-2.1.6.2.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.1.6.2.dist-info/METADATA,sha256=yVT5tnmPWjT9KjQBeGsZ1-M5WkefYDDkk4871QZ_oGE,22139
+ragaai_catalyst-2.1.6.2.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ragaai_catalyst-2.1.6.2.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.1.6.2.dist-info/RECORD,,
{ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/WHEEL: File without changes
{ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/licenses/LICENSE: File without changes
{ragaai_catalyst-2.1.6.1.dist-info → ragaai_catalyst-2.1.6.2.dist-info}/top_level.txt: File without changes