ragaai-catalyst 2.2.5b4__py3-none-any.whl → 2.2.5b5__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their public registries.
- ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +6 -3
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +28 -4
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +13 -12
- ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py +72 -26
- ragaai_catalyst/tracers/tracer.py +1 -1
- {ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/METADATA +1 -1
- {ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/RECORD +10 -10
- {ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/WHEEL +0 -0
- {ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/licenses/LICENSE +0 -0
- {ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/top_level.txt +0 -0
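A pattern repeats across the hunks below: each schema-creation and presigned-URL request now goes through session_manager.make_request_with_retry, logs its latency at debug level, and, on a 401, refreshes the token once and replays the same request (which is also why the stray module-level "import requests" lines are removed). The sketch below condenses that control flow for orientation only; it is assembled from the hunks that follow, not code shipped in the wheel, and the helper name post_with_token_refresh is hypothetical.

```python
import logging
import os
import time

from ragaai_catalyst import RagaAICatalyst
from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager

logger = logging.getLogger(__name__)


def post_with_token_refresh(endpoint, payload, project_name, timeout=120):
    """POST once via the shared session manager; on a 401, refresh the token and retry."""
    headers = {
        "Content-Type": "application/json",
        "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
        "X-Project-Name": project_name,
    }
    start_time = time.time()
    response = session_manager.make_request_with_retry(
        "POST", endpoint, headers=headers, data=payload, timeout=timeout
    )
    elapsed_ms = (time.time() - start_time) * 1000
    logger.debug(f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")

    if response.status_code == 401:
        # One refresh-and-retry pass, as in the upload and schema-creation hunks below.
        logger.warning("Received 401 error. Attempting to refresh token.")
        headers["Authorization"] = f"Bearer {RagaAICatalyst.get_token(force_refresh=True)}"
        response = session_manager.make_request_with_retry(
            "POST", endpoint, headers=headers, data=payload, timeout=timeout
        )

    return response if response.status_code in [200, 201] else None
```

Routing every call through the shared session manager keeps retry and connection-error handling in one place instead of scattering bare requests calls across the upload modules.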

ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py

@@ -240,12 +240,15 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                 user_details=user_details,
                 timeout=timeout
             )
-            logger.info(f"Dataset schema created: {response}")
 
-
-
+            if response is None:
+                logger.error(f"Dataset schema creation failed for {dataset_name} - received None response")
+            elif hasattr(response, 'status_code') and response.status_code in [200, 201]:
+                logger.info(f"Dataset schema created successfully: {response.status_code}")
                 _cache_dataset_creation(cache_key, response)
                 logger.info(f"Response cached successfully for dataset: {dataset_name} and key: {cache_key}")
+            else:
+                logger.warning(f"Dataset schema creation returned unexpected response: {response}")
 
     except Exception as e:
         logger.error(f"Error creating dataset schema: {e}")

ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py

@@ -9,8 +9,6 @@ from requests.exceptions import ConnectionError, Timeout, RequestException
 from http.client import RemoteDisconnected
 from .session_manager import session_manager
 
-import requests
-
 logger = logging.getLogger(__name__)
 
 from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
@@ -64,8 +62,34 @@ class UploadAgenticTraces:
                 presignedURLs = response.json()["data"]["presignedUrls"][0]
                 presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
                 return presignedurl
+            elif response.status_code == 401:
+                logger.warning("Received 401 error while getting presign url. Attempting to refresh token.")
+                token = RagaAICatalyst.get_token(force_refresh=True)
+                headers = {
+                    "Content-Type": "application/json",
+                    "Authorization": f"Bearer {token}",
+                    "X-Project-Name": self.project_name,
+                }
+                response = session_manager.make_request_with_retry(
+                    "POST", endpoint, headers=headers, data=payload, timeout=self.timeout
+                )
+                elapsed_ms = (time.time() - start_time) * 1000
+                logger.debug(
+                    f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
+                )
+                if response.status_code == 200:
+                    presignedURLs = response.json()["data"]["presignedUrls"][0]
+                    presignedurl = self.update_presigned_url(presignedURLs, self.base_url)
+                    return presignedurl
+                else:
+                    logger.error(
+                        f"Error while getting presigned url after token refresh: {response.json()['message']}"
+                    )
+                    return None
             else:
                 # If POST fails, try GET
+                logger.warning(
+                    f"POST request failed for getting presign url with status{response.status_code}.Falling back to GET request.")
                 response = session_manager.make_request_with_retry(
                     "GET", endpoint, headers=headers, data=payload, timeout=self.timeout
                 )
@@ -80,7 +104,7 @@ class UploadAgenticTraces:
                 )
                 return presignedurl
             elif response.status_code == 401:
-                logger.warning("Received 401 error. Attempting to refresh token.")
+                logger.warning("Received 401 error while getting presign url. Attempting to refresh token.")
                 token = RagaAICatalyst.get_token(force_refresh=True)
                 headers = {
                     "Content-Type": "application/json",
@@ -106,7 +130,7 @@ class UploadAgenticTraces:
                     return presignedurl
                 else:
                     logger.error(
-                        f"Error while getting presigned url: {response.json()['message']}"
+                        f"Error while getting presigned url after token refresh: {response.json()['message']}"
                     )
                     return None
         else:

ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py

@@ -8,8 +8,6 @@ from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
 from requests.exceptions import ConnectionError, Timeout, RequestException
 from http.client import RemoteDisconnected
 
-import requests
-
 from ragaai_catalyst.ragaai_catalyst import RagaAICatalyst
 from .session_manager import session_manager
 
@@ -106,7 +104,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None, timeou
         session_manager.handle_request_exceptions(e, "fetching dataset code hashes")
         return None
     except RequestException as e:
-        logger.error(f"Failed to
+        logger.error(f"Failed to fetch dataset code hashes: {e}")
         return None
 
 
@@ -142,7 +140,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
         start_time = time.time()
         # Changed to POST from GET
         endpoint = f"{url_base}/v1/llm/presigned-url"
-        response =
+        response = session_manager.make_request_with_retry(
             "POST", endpoint, headers=headers, data=payload, timeout=timeout
         )
         elapsed_ms = (time.time() - start_time) * 1000
@@ -150,20 +148,20 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
             f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
         )
 
-        if response.status_code
+        if response.status_code in [200, 201]:
             presigned_url = response.json()["data"]["presignedUrls"][0]
             presigned_url = update_presigned_url(presigned_url, url_base)
             return presigned_url
         else:
             # If POST fails, try GET
-            response =
+            response = session_manager.make_request_with_retry(
                 "POST", endpoint, headers=headers, data=payload, timeout=timeout
             )
             elapsed_ms = (time.time() - start_time) * 1000
             logger.debug(
                 f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
             )
-            if response.status_code
+            if response.status_code in [200, 201]:
                 presigned_url = response.json()["data"]["presignedUrls"][0]
                 presigned_url = update_presigned_url(presigned_url, url_base)
                 return presigned_url
@@ -175,7 +173,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
                     "Content-Type": "application/json",
                     "X-Project-Name": project_name,
                 }
-                response =
+                response = session_manager.make_request_with_retry(
                     "POST", endpoint, headers=headers, data=payload, timeout=timeout
                 )
                 elapsed_ms = (time.time() - start_time) * 1000
@@ -188,15 +186,18 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120)
                     return presigned_url
                 else:
                     logger.error(
-                        f"Failed to fetch code
+                        f"Failed to fetch presigned URL for code upload after 401: {response.json()['message']}"
                     )
             else:
                 logger.error(
-                    f"Failed to fetch code
+                    f"Failed to fetch presigned URL for code upload: {response.json()['message']}"
                 )
                 return None
-    except
-
+    except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout, RemoteDisconnected) as e:
+        session_manager.handle_request_exceptions(e, "fetching presigned URL for code upload")
+        return None
+    except RequestException as e:
+        logger.error(f"Failed to fetch presigned URL for code upload: {e}")
         return None
 
 

ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py

@@ -1,8 +1,16 @@
 import os
 import json
 import re
-import
+import logging
+import time
+from urllib3.exceptions import PoolError, MaxRetryError, NewConnectionError
+from requests.exceptions import ConnectionError, Timeout, RequestException
+from http.client import RemoteDisconnected
+
 from ragaai_catalyst import RagaAICatalyst
+from ragaai_catalyst.tracers.agentic_tracing.upload.session_manager import session_manager
+
+logger = logging.getLogger(__name__)
 
 def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None, user_details=None, timeout=120):
     SCHEMA_MAPPING = {}
@@ -13,31 +21,69 @@ def create_dataset_schema_with_trace(project_name, dataset_name, base_url=None,
                 continue
             SCHEMA_MAPPING[key] = {"columnType": "metadata"}
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    headers = {
+        "Content-Type": "application/json",
+        "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
+        "X-Project-Name": project_name,
+    }
+
+    if SCHEMA_MAPPING:
+        payload = json.dumps({
+            "datasetName": dataset_name,
+            "traceFolderUrl": None,
+            "schemaMapping": SCHEMA_MAPPING
+        })
+    else:
+        payload = json.dumps({
+            "datasetName": dataset_name,
+            "traceFolderUrl": None,
+        })
+
+    try:
         # Use provided base_url or fall back to default
         url_base = base_url if base_url is not None else RagaAICatalyst.BASE_URL
-
-
-
-
-            timeout=timeout
+        start_time = time.time()
+        endpoint = f"{url_base}/v1/llm/dataset/logs"
+
+        response = session_manager.make_request_with_retry(
+            "POST", endpoint, headers=headers, data=payload, timeout=timeout
+        )
+
+        elapsed_ms = (time.time() - start_time) * 1000
+        logger.debug(
+            f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
         )
-
-
-
+
+        if response.status_code in [200, 201]:
+            logger.info(f"Dataset schema created successfully: {response.status_code}")
+            return response
+        elif response.status_code == 401:
+            logger.warning("Received 401 error during dataset schema creation. Attempting to refresh token.")
+            RagaAICatalyst.get_token(force_refresh=True)
+            headers = {
+                "Content-Type": "application/json",
+                "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
+                "X-Project-Name": project_name,
+            }
+            response = session_manager.make_request_with_retry(
+                "POST", endpoint, headers=headers, data=payload, timeout=timeout
+            )
+            elapsed_ms = (time.time() - start_time) * 1000
+            logger.debug(
+                f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms"
+            )
+            if response.status_code in [200, 201]:
+                logger.info(f"Dataset schema created successfully after 401: {response.status_code}")
+                return response
+            else:
+                logger.error(f"Failed to create dataset schema after 401: {response.status_code}")
+                return None
+        else:
+            logger.error(f"Failed to create dataset schema: {response.status_code}")
+            return None
+    except (PoolError, MaxRetryError, NewConnectionError, ConnectionError, Timeout, RemoteDisconnected) as e:
+        session_manager.handle_request_exceptions(e, "creating dataset schema")
+        return None
+    except RequestException as e:
+        logger.error(f"Failed to create dataset schema: {e}")
+        return None
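For context, a minimal caller-side sketch of the reworked create_dataset_schema_with_trace contract (the function now returns the response object on a 200/201 and None on every failure path), mirroring the handling added in trace_uploader.py above. The project and dataset names here are placeholders.

```python
from ragaai_catalyst.tracers.agentic_tracing.utils.create_dataset_schema import (
    create_dataset_schema_with_trace,
)

# Placeholder project/dataset names for illustration only.
response = create_dataset_schema_with_trace(
    project_name="demo-project",
    dataset_name="demo-dataset",
    timeout=120,
)

if response is None:
    # Covers non-2xx statuses, a failed retry after a 401, and request exceptions.
    print("Dataset schema creation failed - received None response")
elif hasattr(response, "status_code") and response.status_code in [200, 201]:
    print(f"Dataset schema created successfully: {response.status_code}")
else:
    print(f"Dataset schema creation returned unexpected response: {response}")
```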

ragaai_catalyst/tracers/tracer.py

@@ -170,7 +170,7 @@ class Tracer(AgenticTracing):
             logger.error(f"Failed to retrieve projects list: {e}")
 
         # Handle agentic tracers
-        if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain" or tracer_type == "google-adk":
+        if tracer_type == "agentic" or tracer_type.startswith("agentic/") or tracer_type == "langchain" or tracer_type == "llamaindex" or tracer_type == "google-adk":
             # Setup instrumentors based on tracer type
             instrumentors = []
 

{ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.2.
+Version: 2.2.5b5
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>, Rishabh Pandey <rishabh.pandey@raga.ai>, Jyotsana C G <jyotsana@raga.ai>
 Requires-Python: <=3.13.2,>=3.10

{ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/RECORD

@@ -27,7 +27,7 @@ ragaai_catalyst/redteaming/utils/issue_description.py,sha256=iB0XbeOjdqHTPrikCKS
 ragaai_catalyst/redteaming/utils/rt.png,sha256=HzVC8bz_4UgwafKXuMe8RJVI6CyK_UmSgo53ceAOQK8,282154
 ragaai_catalyst/tracers/__init__.py,sha256=LfgTes-nHpazssbGKnn8kyLZNr49kIPrlkrqqoTFTfc,301
 ragaai_catalyst/tracers/distributed.py,sha256=CGPuOh4CsgEk428PPibieLaAG2Tt3BVygF6ZlmbXxg4,10009
-ragaai_catalyst/tracers/tracer.py,sha256=
+ragaai_catalyst/tracers/tracer.py,sha256=5jM-AJozLoc95OIR-K3K2fs0OJ2Oo47Lgtca_6ToFzk,33649
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -36,11 +36,11 @@ ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-
 ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=Wq4LFclPlLy47LyXvbaLeYiSMQABj7VYS3J87xyea_E,4159
 ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/upload/session_manager.py,sha256=sOlxeIYIP8tycaTtZC9xkZosi6EDJUxvDw0_rc_NLI8,6823
-ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=NuetePZOdDmwRgN3aAsQrDIytXFicNylaAqORdz8C2o,25051
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=yLzYiyNk_XtwwlUFbq5uaRahDKKF-eFBoI6xqHUNedw,14526
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=IAhNFS-nbV_ImNz8Xp98qU4r-2naj49qg9q08x53TFE,12521
 ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=1c6HA_Alm5yofF-eifqwdwHboxr25mvW3xxVYBktfjA,3667
 ragaai_catalyst/tracers/agentic_tracing/utils/file_name_tracker.py,sha256=YG601l1a29ov9VPu9Vl4RXxgL7l16k54_WWnoTNoG58,2064
 ragaai_catalyst/tracers/agentic_tracing/utils/llm_utils.py,sha256=PiyXvEj_qu0EnJFjk4GfGyWFZbwlvQQh0hdQ_lm0p8E,22976
 ragaai_catalyst/tracers/agentic_tracing/utils/model_costs.json,sha256=2tzGw_cKCTPcfjEm7iGvFE6pTw7gMTPzeBov_MTaXNY,321336
@@ -58,8 +58,8 @@ ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256
 ragaai_catalyst/tracers/utils/rag_extraction_logic_final.py,sha256=3ygkRT__lLDRflRttjzPu28tIA8cTCiGQVMQjqMItqQ,11309
 ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=-HZVmijeUFLO7e9OAvi1RJdWVTxPRUHPd1MkKQlCD54,11785
 ragaai_catalyst/tracers/utils/utils.py,sha256=o-p9n2ZuophdrV0wrixu-BqRHCkovup_klc3mS8mU8g,2374
-ragaai_catalyst-2.2.
-ragaai_catalyst-2.2.
-ragaai_catalyst-2.2.
-ragaai_catalyst-2.2.
-ragaai_catalyst-2.2.
+ragaai_catalyst-2.2.5b5.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.2.5b5.dist-info/METADATA,sha256=Oho91NQ2Otn-agTiKESdvBL1iZwLyQLeBtWTgu5nfc8,17735
+ragaai_catalyst-2.2.5b5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ragaai_catalyst-2.2.5b5.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.2.5b5.dist-info/RECORD,,

{ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/WHEEL: file without changes
{ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/licenses/LICENSE: file without changes
{ragaai_catalyst-2.2.5b4.dist-info → ragaai_catalyst-2.2.5b5.dist-info}/top_level.txt: file without changes