ragaai-catalyst 2.1.6b1__py3-none-any.whl → 2.1.6.1__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
- ragaai_catalyst/guard_executor.py +1 -0
- ragaai_catalyst/tracers/agentic_tracing/tracers/base.py +3 -1
- ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py +2 -2
- ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py +9 -5
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py +1 -1
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py +13 -13
- ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py +2 -2
- ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py +3 -2
- ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py +4 -2
- ragaai_catalyst/tracers/instrumentators/__init__.py +0 -5
- ragaai_catalyst/tracers/tracer.py +31 -17
- ragaai_catalyst/tracers/upload_traces.py +5 -2
- ragaai_catalyst/tracers/utils/trace_json_converter.py +39 -24
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/METADATA +5 -4
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/RECORD +18 -21
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/WHEEL +1 -1
- ragaai_catalyst/tracers/instrumentators/langchain.py +0 -14
- ragaai_catalyst/tracers/instrumentators/llamaindex.py +0 -14
- ragaai_catalyst/tracers/instrumentators/openai.py +0 -13
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info/licenses}/LICENSE +0 -0
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/top_level.txt +0 -0
ragaai_catalyst/guard_executor.py
@@ -164,6 +164,7 @@ class GuardExecutor:
         return doc

     def execute_input_guardrails(self, prompt, prompt_params):
+        self.current_trace_id =None
         doc = self.set_variables(prompt,prompt_params)
         deployment_response = self.execute_deployment(self.input_deployment_id,doc)
         self.current_trace_id = deployment_response['data']['results'][0]['executionId']
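The one-line addition resets `current_trace_id` at the start of each `execute_input_guardrails` call, so a failure in `execute_deployment` cannot leave an execution id from a previous call attached to the current one. A minimal sketch of the hazard, using a simplified stand-in class rather than the library's own:

```python
class GuardExecutorSketch:
    """Simplified stand-in; only the id-reset behavior is modeled."""

    def __init__(self):
        self.current_trace_id = None

    def execute_input_guardrails(self, prompt):
        self.current_trace_id = None  # the reset added in 2.1.6.1
        response = self._execute_deployment(prompt)  # may raise
        self.current_trace_id = response["executionId"]

    def _execute_deployment(self, prompt):
        raise RuntimeError("deployment unavailable")


executor = GuardExecutorSketch()
executor.current_trace_id = "run-1"  # left over from an earlier call
try:
    executor.execute_input_guardrails("hello")
except RuntimeError:
    pass
assert executor.current_trace_id is None  # without the reset, "run-1" would leak
```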
ragaai_catalyst/tracers/agentic_tracing/tracers/base.py
@@ -92,6 +92,7 @@ class BaseTracer:
         self._upload_tasks = []
         self._is_uploading = False
         self._upload_completed_callback = None
+        self.timeout = self.user_details.get("timeout", 120)

         ensure_uploader_running()

@@ -314,7 +315,8 @@ class BaseTracer:
            project_id=self.project_id,
            dataset_name=self.dataset_name,
            user_details=self.user_details,
-            base_url=self.base_url
+            base_url=self.base_url,
+            timeout=self.timeout
        )

        # For backward compatibility
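`BaseTracer` now picks the timeout up from `user_details` (falling back to 120 seconds) and forwards it to the background upload task. A minimal sketch of the fallback pattern; the layout of `user_details` beyond the `timeout` key is an assumption here:

```python
user_details = {"timeout": 300}             # supplied by the caller
timeout = user_details.get("timeout", 120)  # 300 here; 120 when the key is absent
assert timeout == 300
assert {}.get("timeout", 120) == 120
```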
ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py
@@ -48,7 +48,7 @@ from ragaai_catalyst.tracers.upload_traces import UploadTraces
 class AgenticTracing(
     BaseTracer, LLMTracerMixin, ToolTracerMixin, AgentTracerMixin, CustomTracerMixin
 ):
-    def __init__(self, user_detail, auto_instrumentation=None):
+    def __init__(self, user_detail, auto_instrumentation=None, timeout=120):
         # Initialize all parent classes
         self.user_interaction_tracer = UserInteractionTracer()
         LLMTracerMixin.__init__(self)
@@ -60,7 +60,7 @@ class AgenticTracing(
         self.project_id = user_detail["project_id"]
         self.trace_user_detail = user_detail["trace_user_detail"]
         self.base_url = f"{RagaAICatalyst.BASE_URL}"
-        self.timeout =
+        self.timeout = timeout

         # Add warning flag
         self._warning_shown = False
ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py
@@ -77,7 +77,7 @@ def get_executor():

 def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                    project_name: str, project_id: str, dataset_name: str,
-                   user_details: Dict[str, Any], base_url: str) -> Dict[str, Any]:
+                   user_details: Dict[str, Any], base_url: str, timeout=120) -> Dict[str, Any]:
     """
     Process a single upload task

@@ -147,7 +147,8 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                json_file_path=filepath,
                dataset_name=dataset_name,
                project_name=project_name,
-                base_url=base_url
+                base_url=base_url,
+                timeout=timeout
            )
            logger.info(f"Trace metrics uploaded: {response}")
        except Exception as e:
@@ -167,6 +168,7 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                dataset_name=dataset_name,
                user_detail=user_details,
                base_url=base_url,
+                timeout=timeout
            )
            upload_traces.upload_agentic_traces()
            logger.info("Agentic traces uploaded successfully")
@@ -185,7 +187,8 @@ def process_upload(task_id: str, filepath: str, hash_id: str, zip_path: str,
                zip_path=zip_path,
                project_name=project_name,
                dataset_name=dataset_name,
-                base_url=base_url
+                base_url=base_url,
+                timeout=timeout
            )
            logger.info(f"Code hash uploaded: {response}")
        except Exception as e:
@@ -215,7 +218,7 @@ def save_task_status(task_status: Dict[str, Any]):
     with open(status_path, "w") as f:
         json.dump(task_status, f, indent=2)

-def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
+def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url, timeout=120):
     """
     Submit a new upload task using futures.

@@ -259,7 +262,8 @@ def submit_upload_task(filepath, hash_id, zip_path, project_name, project_id, dataset_name, user_details, base_url):
            project_id=project_id,
            dataset_name=dataset_name,
            user_details=user_details,
-            base_url=base_url
+            base_url=base_url,
+            timeout=timeout
        )

        # Store the future for later status checks
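Every function on the upload path now takes the same `timeout` keyword and passes it along unchanged, so a single value chosen on the tracer governs all HTTP calls made for one upload task. A hedged sketch of a call with the new argument; all values besides the parameter names are placeholders:

```python
from ragaai_catalyst.tracers.agentic_tracing.upload.trace_uploader import submit_upload_task

task_id = submit_upload_task(
    filepath="/tmp/trace_0001.json",       # placeholder paths and ids
    hash_id="abc123",
    zip_path="/tmp/code_abc123.zip",
    project_name="demo-project",
    project_id="42",
    dataset_name="demo-dataset",
    user_details={"trace_user_detail": {}},
    base_url="https://catalyst.example/api",
    timeout=300,  # overrides the 120 s default for every request in this task
)
```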
ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py
@@ -9,19 +9,19 @@ logger = logging.getLogger(__name__)
 from urllib.parse import urlparse, urlunparse
 import re

-def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None):
-    code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url)
+def upload_code(hash_id, zip_path, project_name, dataset_name, base_url=None, timeout=120):
+    code_hashes_list = _fetch_dataset_code_hashes(project_name, dataset_name, base_url, timeout=timeout)

     if hash_id not in code_hashes_list:
-        presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url)
-        _put_zip_presigned_url(project_name, presigned_url, zip_path)
+        presigned_url = _fetch_presigned_url(project_name, dataset_name, base_url, timeout=timeout)
+        _put_zip_presigned_url(project_name, presigned_url, zip_path, timeout=timeout)

-        response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url)
+        response = _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url, timeout=timeout)
         return response
     else:
         return "Code already exists"

-def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
+def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None, timeout=120):
     payload = {}
     headers = {
         "Authorization": f"Bearer {os.getenv('RAGAAI_CATALYST_TOKEN')}",
@@ -36,7 +36,7 @@ def _fetch_dataset_code_hashes(project_name, dataset_name, base_url=None):
            endpoint,
            headers=headers,
            data=payload,
-            timeout=
+            timeout=timeout)
        elapsed_ms = (time.time() - start_time) * 1000
        logger.debug(
            f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -66,7 +66,7 @@ def update_presigned_url(presigned_url, base_url):
     return presigned_url


-def _fetch_presigned_url(project_name, dataset_name, base_url=None):
+def _fetch_presigned_url(project_name, dataset_name, base_url=None, timeout=120):
     payload = json.dumps({
         "datasetName": dataset_name,
         "numFiles": 1,
@@ -87,7 +87,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None):
            endpoint,
            headers=headers,
            data=payload,
-            timeout=
+            timeout=timeout)
        elapsed_ms = (time.time() - start_time) * 1000
        logger.debug(
            f"API Call: [GET] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
@@ -102,7 +102,7 @@ def _fetch_presigned_url(project_name, dataset_name, base_url=None):
        logger.error(f"Failed to list datasets: {e}")
        raise

-def _put_zip_presigned_url(project_name, presignedUrl, filename):
+def _put_zip_presigned_url(project_name, presignedUrl, filename, timeout=120):
     headers = {
         "X-Project-Name": project_name,
         "Content-Type": "application/zip",
@@ -119,14 +119,14 @@ def _put_zip_presigned_url(project_name, presignedUrl, filename):
        presignedUrl,
        headers=headers,
        data=payload,
-        timeout=
+        timeout=timeout)
    elapsed_ms = (time.time() - start_time) * 1000
    logger.debug(
        f"API Call: [PUT] {presignedUrl} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
    if response.status_code != 200 or response.status_code != 201:
        return response, response.status_code

-def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None):
+def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None, timeout=120):
     payload = json.dumps({
         "datasetName": dataset_name,
         "codeHash": hash_id,
@@ -147,7 +147,7 @@ def _insert_code(dataset_name, hash_id, presigned_url, project_name, base_url=None):
        endpoint,
        headers=headers,
        data=payload,
-        timeout=
+        timeout=timeout)
    elapsed_ms = (time.time() - start_time) * 1000
    logger.debug(
        f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
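All of these helpers hand the new `timeout` straight to `requests`. As general `requests` behavior (not specific to this package), the `timeout` argument bounds the connection attempt and each read between bytes rather than the whole transfer, and raises `requests.exceptions.Timeout` when exceeded. A minimal sketch:

```python
import requests

def fetch_with_timeout(url: str, timeout: float = 120.0):
    """Return the response, or None if the server was too slow."""
    try:
        # `timeout` caps connect time and per-read time, not total duration.
        return requests.get(url, timeout=timeout)
    except requests.exceptions.Timeout:
        return None

# resp = fetch_with_timeout("https://example.com/health", timeout=5)  # placeholder URL
```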
ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py
@@ -15,7 +15,7 @@ logging_level = (
 )


-def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None):
+def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None, timeout=120):
     try:
         with open(json_file_path, "r") as f:
             traces = json.load(f)
@@ -51,7 +51,7 @@ def upload_trace_metric(json_file_path, dataset_name, project_name, base_url=None):
            endpoint,
            headers=headers,
            data=payload,
-            timeout=
+            timeout=timeout)
        elapsed_ms = (time.time() - start_time) * 1000
        logger.debug(
            f"API Call: [POST] {endpoint} | Status: {response.status_code} | Time: {elapsed_ms:.2f}ms")
ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py
@@ -14,7 +14,7 @@ class DynamicTraceExporter(SpanExporter):
     certain properties to be updated dynamically during execution.
     """

-    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
+    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120):
         """
         Initialize the DynamicTraceExporter.

@@ -33,7 +33,8 @@ class DynamicTraceExporter(SpanExporter):
            dataset_name=dataset_name,
            user_details=user_details,
            base_url=base_url,
-            custom_model_cost=custom_model_cost
+            custom_model_cost=custom_model_cost,
+            timeout=timeout
        )

        # Store the initial values
ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py
@@ -19,7 +19,7 @@ logging_level = (


 class RAGATraceExporter(SpanExporter):
-    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost):
+    def __init__(self, files_to_zip, project_name, project_id, dataset_name, user_details, base_url, custom_model_cost, timeout=120):
         self.trace_spans = dict()
         self.tmp_dir = tempfile.gettempdir()
         self.files_to_zip = files_to_zip
@@ -30,6 +30,7 @@ class RAGATraceExporter(SpanExporter):
         self.base_url = base_url
         self.custom_model_cost = custom_model_cost
         self.system_monitor = SystemMonitor(dataset_name)
+        self.timeout = timeout

     def export(self, spans):
         for span in spans:
@@ -122,7 +123,8 @@ class RAGATraceExporter(SpanExporter):
            project_id=self.project_id,
            dataset_name=self.dataset_name,
            user_details=self.user_details,
-            base_url=self.base_url
+            base_url=self.base_url,
+            timeout=self.timeout
        )

        logger.info(f"Submitted upload task with ID: {self.upload_task_id}")
ragaai_catalyst/tracers/tracer.py
@@ -21,11 +21,6 @@ from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 from ragaai_catalyst.tracers.exporters.file_span_exporter import FileSpanExporter
 from ragaai_catalyst.tracers.exporters.raga_exporter import RagaExporter
-from ragaai_catalyst.tracers.instrumentators import (
-    LangchainInstrumentor,
-    OpenAIInstrumentor,
-    LlamaIndexInstrumentor,
-)
 from ragaai_catalyst.tracers.utils import get_unique_key
 # from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
 from ragaai_catalyst.tracers.llamaindex_instrumentation import LlamaIndexInstrumentationTracer
@@ -42,7 +37,6 @@ logging_level = (

 class Tracer(AgenticTracing):
     NUM_PROJECTS = 99999
-    TIMEOUT = 10
     def __init__(
         self,
         project_name,
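With the home-grown wrapper classes gone (their modules are deleted at the end of this diff), instrumentation goes through the openinference instrumentor classes directly, as the hunks below show. A minimal sketch of that direct pattern, assuming `openinference-instrumentation-langchain` is installed; the provider wiring is illustrative, not the package's exact setup:

```python
from opentelemetry.sdk.trace import TracerProvider
from openinference.instrumentation.langchain import LangChainInstrumentor

provider = TracerProvider()
# openinference instrumentors follow the OpenTelemetry BaseInstrumentor
# interface: instantiate, then .instrument() against a tracer provider.
LangChainInstrumentor().instrument(tracer_provider=provider)
```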
@@ -52,7 +46,7 @@ class Tracer(AgenticTracing):
         pipeline=None,
         metadata=None,
         description=None,
-
+        timeout=120, # Default timeout of 120 seconds
         update_llm_cost=True, # Parameter to control model cost updates
         auto_instrumentation={ # to control automatic instrumentation of different components
             'llm':True,
@@ -77,7 +71,7 @@ class Tracer(AgenticTracing):
            pipeline (dict, optional): The pipeline configuration. Defaults to None.
            metadata (dict, optional): The metadata. Defaults to None.
            description (str, optional): The description. Defaults to None.
-
+            timeout (int, optional): The upload timeout in seconds. Defaults to 30.
            update_llm_cost (bool, optional): Whether to update model costs from GitHub. Defaults to True.
        """

@@ -138,9 +132,8 @@ class Tracer(AgenticTracing):
         # self.metadata["total_tokens"] = 0
         self.pipeline = pipeline
         self.description = description
-        self.
+        self.timeout = timeout
         self.base_url = f"{RagaAICatalyst.BASE_URL}"
-        self.timeout = 30
         self.num_projects = 99999
         self.start_time = datetime.datetime.now().astimezone().isoformat()
         self.model_cost_dict = model_cost
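Net effect for users: the upload timeout is now a single constructor argument instead of the old fixed `TIMEOUT = 10` class attribute and hard-coded `self.timeout = 30`. A hedged usage sketch; the `dataset_name` and `tracer_type` values are illustrative:

```python
from ragaai_catalyst import Tracer

tracer = Tracer(
    project_name="demo-project",     # illustrative values throughout
    dataset_name="demo-dataset",
    tracer_type="agentic/langchain",
    timeout=300,  # seconds; defaults to 120 and flows into every upload call
)
```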
@@ -283,6 +276,14 @@ class Tracer(AgenticTracing):
                logger.info("Instrumenting Smolagents...")
            except (ImportError, ModuleNotFoundError):
                logger.debug("Smolagents not available in environment")
+
+            # OpenAI Agents
+            try:
+                from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
+                instrumentors.append((OpenAIAgentsInstrumentor, []))
+                logger.info("Instrumenting OpenAI Agents...")
+            except (ImportError, ModuleNotFoundError):
+                logger.debug("OpenAI Agents not available in environment")

            if not instrumentors:
                logger.warning("No agentic packages found in environment to instrument")
@@ -298,7 +299,7 @@ class Tracer(AgenticTracing):
        elif tracer_type == "agentic/llamaindex":
            from openinference.instrumentation.llama_index import LlamaIndexInstrumentor
            instrumentors += [(LlamaIndexInstrumentor, [])]
-
+
        elif tracer_type == "agentic/langchain" or tracer_type == "agentic/langgraph":
            from openinference.instrumentation.langchain import LangChainInstrumentor
            instrumentors += [(LangChainInstrumentor, [])]
@@ -319,6 +320,10 @@ class Tracer(AgenticTracing):
        elif tracer_type == "agentic/smolagents":
            from openinference.instrumentation.smolagents import SmolagentsInstrumentor
            instrumentors += [(SmolagentsInstrumentor, [])]
+
+        elif tracer_type == "agentic/openai_agents":
+            from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
+            instrumentors += [(OpenAIAgentsInstrumentor, [])]

        else:
            # Unknown agentic tracer type
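`"agentic/openai_agents"` thereby becomes a selectable tracer type alongside the existing ones, with the matching `openinference-instrumentation-openai-agents` dependency added in METADATA below. A sketch, reusing the hypothetical values from the previous example:

```python
tracer = Tracer(
    project_name="demo-project",
    dataset_name="demo-dataset",
    tracer_type="agentic/openai_agents",  # string taken verbatim from the hunk above
)
```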
@@ -392,7 +397,7 @@
            'pipeline': self.pipeline,
            'metadata': self.metadata,
            'description': self.description,
-            '
+            'timeout': self.timeout
        }

        # Reinitialize self with new dataset_name and stored parameters
@@ -518,6 +523,14 @@ class Tracer(AgenticTracing):
            combined_metadata.update(user_detail['trace_user_detail']['metadata'])
        if additional_metadata:
            combined_metadata.update(additional_metadata)
+
+        model_cost_latency_metadata = {}
+        if additional_metadata:
+            model_cost_latency_metadata["model"] = additional_metadata["model_name"]
+            model_cost_latency_metadata["total_cost"] = additional_metadata["cost"]
+            model_cost_latency_metadata["total_latency"] = additional_metadata["latency"]
+            model_cost_latency_metadata["recorded_on"] = datetime.datetime.now().astimezone().isoformat()
+            combined_metadata.update(model_cost_latency_metadata)

        langchain_traces = langchain_tracer_extraction(data, self.user_context)
        final_result = convert_langchain_callbacks_output(langchain_traces)
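The new block renames the raw keys of `additional_metadata` (`model_name`, `cost`, `latency`) to the column names the dataset schema expects (`model`, `total_cost`, `total_latency`; see the upload_traces.py hunk below) and stamps the record time. The transformation with made-up numbers:

```python
import datetime

additional_metadata = {"model_name": "gpt-4o-mini", "cost": 0.00042, "latency": 1.73}

model_cost_latency_metadata = {
    "model": additional_metadata["model_name"],
    "total_cost": additional_metadata["cost"],
    "total_latency": additional_metadata["latency"],
    "recorded_on": datetime.datetime.now().astimezone().isoformat(),
}
# -> {"model": "gpt-4o-mini", "total_cost": 0.00042, "total_latency": 1.73,
#     "recorded_on": "...ISO timestamp..."}
```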
@@ -600,11 +613,11 @@

         This function uploads the traces generated by the RagaAICatalyst client to the RagaAICatalyst server. It uses the `aiohttp` library to make an asynchronous HTTP request to the server. The function first checks if the `RAGAAI_CATALYST_TOKEN` environment variable is set. If not, it raises a `ValueError` with the message "RAGAAI_CATALYST_TOKEN not found. Cannot upload traces.".

-        The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `
+        The function then uses the `asyncio.wait_for` function to wait for the `check_and_upload_files` method of the `raga_client` object to complete. The `check_and_upload_files` method is called with the `session` object and a list of file paths to be uploaded. The `timeout` parameter is set to the value of the `timeout` attribute of the `Tracer` object.

         If the upload is successful, the function returns the string "Files uploaded successfully" if the `upload_stat` variable is truthy, otherwise it returns the string "No files to upload".

-        If the upload times out, the function returns a string with the message "Upload timed out after {self.
+        If the upload times out, the function returns a string with the message "Upload timed out after {self.timeout} seconds".

         If any other exception occurs during the upload, the function returns a string with the message "Upload failed: {str(e)}", where `{str(e)}` is the string representation of the exception.

@@ -626,7 +639,7 @@
                    session=session,
                    file_paths=[self.filespanx.sync_file],
                ),
-                timeout=self.
+                timeout=self.timeout,
            )
            return (
                "Files uploaded successfully"
@@ -634,7 +647,7 @@
                else "No files to upload"
            )
        except asyncio.TimeoutError:
-            return f"Upload timed out after {self.
+            return f"Upload timed out after {self.timeout} seconds"
        except Exception as e:
            return f"Upload failed: {str(e)}"

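`asyncio.wait_for` cancels the awaited coroutine and raises `asyncio.TimeoutError` once the deadline passes, which the `except` branch above turns into the "Upload timed out" message. A self-contained illustration of exactly that control flow:

```python
import asyncio

async def slow_upload():
    await asyncio.sleep(10)  # stand-in for check_and_upload_files
    return True

async def upload(timeout: float):
    try:
        upload_stat = await asyncio.wait_for(slow_upload(), timeout=timeout)
        return "Files uploaded successfully" if upload_stat else "No files to upload"
    except asyncio.TimeoutError:
        return f"Upload timed out after {timeout} seconds"

print(asyncio.run(upload(0.1)))  # -> Upload timed out after 0.1 seconds
```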
@@ -734,7 +747,8 @@
            dataset_name=self.dataset_name,
            user_details=self.user_details,
            base_url=self.base_url,
-            custom_model_cost=self.model_custom_cost
+            custom_model_cost=self.model_custom_cost,
+            timeout=self.timeout
        )

        # Set up tracer provider
ragaai_catalyst/tracers/upload_traces.py
@@ -28,11 +28,14 @@ class UploadTraces:
            "response":{"columnType": "response"},
            "context": {"columnType": "context"},
            "llm_model": {"columnType":"pipeline"},
-            "recorded_on": {"columnType": "
+            "recorded_on": {"columnType": "timestamp"},
            "embed_model": {"columnType":"pipeline"},
            "log_source": {"columnType": "metadata"},
            "vector_store":{"columnType":"pipeline"},
-            "feedback": {"columnType":"feedBack"}
+            "feedback": {"columnType":"feedBack"},
+            "model": {"columnType": "metadata"},
+            "total_cost": {"columnType": "metadata", "dataType": "numerical"},
+            "total_latency": {"columnType": "metadata", "dataType": "numerical"},
        }

        if additional_metadata_keys:
ragaai_catalyst/tracers/utils/trace_json_converter.py
@@ -226,7 +226,7 @@ def get_spans(input_trace, custom_model_cost):

 def convert_json_format(input_trace, custom_model_cost):
     """
-    Converts a JSON from one format to UI format.
+    Converts a JSON from one format to UI format, handling nested spans.

     Args:
         input_trace (str): The input JSON string.
@@ -238,8 +238,8 @@ def convert_json_format(input_trace, custom_model_cost):
        "id": input_trace[0]["context"]["trace_id"],
        "trace_name": "",
        "project_name": "",
-        "start_time": convert_time_format(min(item["start_time"] for item in input_trace)),
-        "end_time": convert_time_format(max(item["end_time"] for item in input_trace))
+        "start_time": convert_time_format(min(item["start_time"] for item in input_trace)),
+        "end_time": convert_time_format(max(item["end_time"] for item in input_trace))
    }
    final_trace["metadata"] = {
        "tokens": {
@@ -253,31 +253,46 @@ def convert_json_format(input_trace, custom_model_cost):
            "total_cost": 0.0
        }
    }
-    final_trace["replays"]={"source":None}
-    final_trace["data"]=[{}]
+    final_trace["replays"] = {"source": None}
+    final_trace["data"] = [{}]
+    final_trace["network_calls"] = []
+    final_trace["interactions"] = []
+
+    # import pdb; pdb.set_trace()
+
+    # Helper to recursively extract cost/token info from all spans
+    def accumulate_metrics(span):
+        if span["type"] == "llm" and "info" in span:
+            info = span["info"]
+            cost = info.get("cost", {})
+            tokens = info.get("tokens", {})
+
+            final_trace["metadata"]["tokens"]["prompt_tokens"] += tokens.get("prompt_tokens", 0.0)
+            final_trace["metadata"]["tokens"]["completion_tokens"] += tokens.get("completion_tokens", 0.0)
+            final_trace["metadata"]["tokens"]["total_tokens"] += tokens.get("total_tokens", 0.0)
+
+            final_trace["metadata"]["cost"]["input_cost"] += cost.get("input_cost", 0.0)
+            final_trace["metadata"]["cost"]["output_cost"] += cost.get("output_cost", 0.0)
+            final_trace["metadata"]["cost"]["total_cost"] += cost.get("total_cost", 0.0)
+
+        # Recursively process children
+        children = span.get("data", {}).get("children", [])
+        for child in children:
+            accumulate_metrics(child)
+
+    # Extract and attach spans
    try:
-
+        spans = get_spans(input_trace, custom_model_cost)
+        final_trace["data"][0]["spans"] = spans
+
+        # Accumulate from root spans and their children
+        for span in spans:
+            accumulate_metrics(span)
    except Exception as e:
        raise Exception(f"Error in get_spans function: {e}")
-    final_trace["network_calls"] =[]
-    final_trace["interactions"] = []
-
-    for itr in final_trace["data"][0]["spans"]:
-        if itr["type"]=="llm":
-            if "tokens" in itr["info"]:
-                if "prompt_tokens" in itr["info"]["tokens"]:
-                    final_trace["metadata"]["tokens"]["prompt_tokens"] += itr["info"]["tokens"].get('prompt_tokens', 0.0)
-                    final_trace["metadata"]["cost"]["input_cost"] += itr["info"]["cost"].get('input_cost', 0.0)
-                if "completion_tokens" in itr["info"]["tokens"]:
-                    final_trace["metadata"]["tokens"]["completion_tokens"] += itr["info"]["tokens"].get('completion_tokens', 0.0)
-                    final_trace["metadata"]["cost"]["output_cost"] += itr["info"]["cost"].get('output_cost', 0.0)
-            if "tokens" in itr["info"]:
-                if "total_tokens" in itr["info"]["tokens"]:
-                    final_trace["metadata"]["tokens"]["total_tokens"] += itr["info"]["tokens"].get('total_tokens', 0.0)
-                    final_trace["metadata"]["cost"]["total_cost"] += itr["info"]["cost"].get('total_cost', 0.0)

-    #
-    final_trace["metadata"]["total_cost"] = final_trace["metadata"]["cost"]["total_cost"]
+    # Total metadata summary
+    final_trace["metadata"]["total_cost"] = final_trace["metadata"]["cost"]["total_cost"]
    final_trace["metadata"]["total_tokens"] = final_trace["metadata"]["tokens"]["total_tokens"]

    return final_trace
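The rewrite matters because spans nest: an agent span carries child llm spans under `data.children`, and the old flat loop only saw top-level llm spans. A self-contained re-run of the same accumulation idea on a toy nested trace, assuming the span shape used in the hunk:

```python
totals = {"total_tokens": 0.0, "total_cost": 0.0}

def accumulate(span):
    # Count llm spans at any depth, mirroring accumulate_metrics above.
    if span.get("type") == "llm" and "info" in span:
        totals["total_tokens"] += span["info"].get("tokens", {}).get("total_tokens", 0.0)
        totals["total_cost"] += span["info"].get("cost", {}).get("total_cost", 0.0)
    for child in span.get("data", {}).get("children", []):
        accumulate(child)

toy_trace = [{
    "type": "agent",
    "data": {"children": [
        {"type": "llm", "info": {"tokens": {"total_tokens": 120}, "cost": {"total_cost": 0.5}}},
        {"type": "llm", "info": {"tokens": {"total_tokens": 80}, "cost": {"total_cost": 0.25}}},
    ]},
}]
for span in toy_trace:
    accumulate(span)
print(totals)  # {'total_tokens': 200.0, 'total_cost': 0.75}
# The pre-2.1.6.1 flat loop would have missed both nested llm spans here.
```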
{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/METADATA
@@ -1,9 +1,9 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.1.6b1
+Version: 2.1.6.1
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
-Requires-Python: <3.13,>=3.
+Requires-Python: <3.13,>=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aiohttp>=3.10.2
@@ -39,7 +39,7 @@ Requires-Dist: openinference-instrumentation-openai
 Requires-Dist: openinference-instrumentation-bedrock
 Requires-Dist: openinference-instrumentation-crewai
 Requires-Dist: openinference-instrumentation-haystack
-Requires-Dist: openinference-instrumentation-
+Requires-Dist: openinference-instrumentation-openai-agents
 Requires-Dist: openinference-instrumentation-smolagents
 Requires-Dist: opentelemetry-sdk
 Requires-Dist: opentelemetry-exporter-otlp
@@ -51,6 +51,7 @@ Requires-Dist: black; extra == "dev"
 Requires-Dist: isort; extra == "dev"
 Requires-Dist: mypy; extra == "dev"
 Requires-Dist: flake8; extra == "dev"
+Dynamic: license-file

 # RagaAI Catalyst

{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/RECORD
@@ -3,7 +3,7 @@ ragaai_catalyst/_version.py,sha256=JKt9KaVNOMVeGs8ojO6LvIZr7ZkMzNN-gCcvryy4x8E,4
 ragaai_catalyst/dataset.py,sha256=YCj8Ovu6y38KEw-1HCe4xQWkmYPgfNTtMa8Q0g6B62o,29401
 ragaai_catalyst/evaluation.py,sha256=O96CydYVPh3duUmXjY6REIXMOR-tOPixSG-Qhrf636A,22955
 ragaai_catalyst/experiment.py,sha256=8yQo1phCHlpnJ-4CqCaIbLXg_1ZlAuLGI9kqGBl-OTE,18859
-ragaai_catalyst/guard_executor.py,sha256=
+ragaai_catalyst/guard_executor.py,sha256=f2FXQSW17z4-eor61J_mtD0z-xBm9yordq8giB-GN_U,14006
 ragaai_catalyst/guardrails_manager.py,sha256=_VrARJ1udmCF8TklNKy7XTQUaM8ATDhTOAGDonBkFro,14245
 ragaai_catalyst/internal_api_completion.py,sha256=DdICI5yfEudiOAIC8L4oxH0Qz7kX-BZCdo9IWsi2gNo,2965
 ragaai_catalyst/prompt_manager.py,sha256=W8ypramzOprrJ7-22d5vkBXIuIQ8v9XAzKDGxKsTK28,16550
@@ -31,8 +31,8 @@ ragaai_catalyst/tracers/distributed.py,sha256=MwlBwIxCAng-OI-7Ove_rkE1mTLeuW4Jw-
 ragaai_catalyst/tracers/langchain_callback.py,sha256=CB75zzG3-DkYTELj0vI1MOHQTY0MuQJfoHIXz9Cl8S8,34568
 ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
 ragaai_catalyst/tracers/llamaindex_instrumentation.py,sha256=Ys_jLkvVqo12bKgXDmkp4TxJu9HkBATrFE8cIcTYxWw,14329
-ragaai_catalyst/tracers/tracer.py,sha256=
-ragaai_catalyst/tracers/upload_traces.py,sha256=
+ragaai_catalyst/tracers/tracer.py,sha256=CSaqVjg33KUEAhc7gcBhb8c_44I3I2wfyAwNqvz6Lu4,37003
+ragaai_catalyst/tracers/upload_traces.py,sha256=nqH6Ldng33VhEGcvQberyWKZ1WvLuBRoySEit8b0f7s,5882
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -45,20 +45,20 @@ ragaai_catalyst/tracers/agentic_tracing/tests/ai_travel_agent.py,sha256=S4rCcKzU
 ragaai_catalyst/tracers/agentic_tracing/tests/unique_decorator_test.py,sha256=Xk1cLzs-2A3dgyBwRRnCWs7Eubki40FVonwd433hPN8,4805
 ragaai_catalyst/tracers/agentic_tracing/tracers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/tracers/agent_tracer.py,sha256=LzbsHvELwBmH8ObFomJRhiQ98b6MEi18irm0DPiplt0,29743
-ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/tracers/base.py,sha256=5O8GKv5U_L3biLpsk5HEW21yiKWCJ8H4ijYjHrTzcW4,54350
 ragaai_catalyst/tracers/agentic_tracing/tracers/custom_tracer.py,sha256=OBJJjFSvwRjCGNJyqX3yIfC1W05ZN2QUXasCJ4gmCjQ,13930
 ragaai_catalyst/tracers/agentic_tracing/tracers/langgraph_tracer.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/agentic_tracing/tracers/llm_tracer.py,sha256=z-qzmCQCkhyW0aLDUR_rNq4pmxhAaVhNY-kZQsox-Ws,50221
-ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/tracers/main_tracer.py,sha256=9Kn6gppITCJ8obLyAGgfpWOlyqKfx2Zd4RkkMZMHYn8,16166
 ragaai_catalyst/tracers/agentic_tracing/tracers/network_tracer.py,sha256=m8CxYkl7iMiFya_lNwN1ykBc3Pmo-2pR_2HmpptwHWQ,10352
 ragaai_catalyst/tracers/agentic_tracing/tracers/tool_tracer.py,sha256=xxrliKPfdfbIZRZqMnUewsaTD8_Hv0dbuoBivNZGD4U,21674
 ragaai_catalyst/tracers/agentic_tracing/tracers/user_interaction_tracer.py,sha256=bhSUhNQCuJXKjgJAXhjKEYjnHMpYN90FSZdR84fNIKU,4614
 ragaai_catalyst/tracers/agentic_tracing/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/upload/trace_uploader.py,sha256=HREkAmANj21XqiR1Hf5chCpK8UjECEoryUjH4IuGEjA,12540
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_agentic_traces.py,sha256=fKg_3svwWDGDYmlqHCK4lXGq_LFpRzA3wmERRIDoqy4,8542
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_code.py,sha256=bRJGqdLPm0fTORAdUGRo0aDPiZeqB30gK_iC7SsymL4,6603
 ragaai_catalyst/tracers/agentic_tracing/upload/upload_local_metric.py,sha256=m1O8lKpxKwtHofXLW3fTHX5yfqDW5GxoveARlg5cTw4,2571
-ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py,sha256=
+ragaai_catalyst/tracers/agentic_tracing/upload/upload_trace_metric.py,sha256=hRuh-cczHbeM_Spbf9HTYd149uSs1zP0TvkYuZKF4ec,4296
 ragaai_catalyst/tracers/agentic_tracing/utils/__init__.py,sha256=XdB3X_ufe4RVvGorxSqAiB9dYv4UD7Hvvuw3bsDUppY,60
 ragaai_catalyst/tracers/agentic_tracing/utils/api_utils.py,sha256=JyNCbfpW-w4O9CjtemTqmor2Rh1WGpQwhRaDSRmBxw8,689
 ragaai_catalyst/tracers/agentic_tracing/utils/create_dataset_schema.py,sha256=xHTe7YiCmCY7kRxe0kB7wwo_ueT1UB_hnAA36R2m-pQ,941
@@ -74,24 +74,21 @@ ragaai_catalyst/tracers/agentic_tracing/utils/trace_utils.py,sha256=rssHolDvKxZ9
 ragaai_catalyst/tracers/agentic_tracing/utils/unique_decorator.py,sha256=G027toV-Km20JjKrc-Y_PilQ8ABEKrBvvzgLTnqVg7I,5819
 ragaai_catalyst/tracers/agentic_tracing/utils/zip_list_of_unique_files.py,sha256=4TeCGsFF26249fV6dJHLTZDrRa93SG9oer4rudoF8Y4,19443
 ragaai_catalyst/tracers/exporters/__init__.py,sha256=wQbaqyeIjVZxYprHCKZ9BeiqxeXYBKjzEgP79LWNxCU,293
-ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=
+ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=oFIubzCCmC3fpoH_ybohevjOE5WIEG9ahcPIEtwn99c,5093
 ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=RgGteu-NVGprXKkynvyIO5yOjpbtA41R3W_NzCjnkwE,6445
 ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=6xvjWXyh8XPkHKSLLmAZUQSvwuyY17ov8pv2VdfI0qA,17875
-ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=
-ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=
-ragaai_catalyst/tracers/instrumentators/langchain.py,sha256=yMN0qVF0pUVk6R5M1vJoUXezDo1ejs4klCFRlE8x4vE,574
-ragaai_catalyst/tracers/instrumentators/llamaindex.py,sha256=SMrRlR4xM7k9HK43hakE8rkrWHxMlmtmWD-AX6TeByc,416
-ragaai_catalyst/tracers/instrumentators/openai.py,sha256=14R4KW9wQCR1xysLfsP_nxS7cqXrTPoD8En4MBAaZUU,379
+ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=n78Yky-32yVI8fiL_3yD-iShgizb8IcN8OqQ5gOwZ9w,5387
+ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
 ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
 ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py,sha256=8qLo7x4Zsn3dhJfSv9gviB60YXZ2TOsWEouucJmBM0c,1724
 ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py,sha256=ZhPs0YhVtB82-Pq9o1BvCinKE_WPvVxPTEcZjlJbFYM,2371
 ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=XS2_x2qneqEx9oAighLg-LRiueWcESLwIC2r7eJT-Ww,3117
 ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256=C3uwkibJ08C9sOX-54kulZYmJlIpZ-SQpfE6HNGrjbM,343502
-ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=
+ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=RH9dkCnPaSygvPvAilRE4lUdUaRtALJKH85E4jHcVoM,14072
 ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
+ragaai_catalyst-2.1.6.1.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.1.6.1.dist-info/METADATA,sha256=jwIl1_mf_YsEQvnTruG2ki9kl34EhgSxpjJWBVGGOH0,22139
+ragaai_catalyst-2.1.6.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ragaai_catalyst-2.1.6.1.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.1.6.1.dist-info/RECORD,,
ragaai_catalyst/tracers/instrumentators/langchain.py (deleted)
@@ -1,14 +0,0 @@
-from importlib.util import find_spec
-from opentelemetry.instrumentation.langchain import LangchainInstrumentor
-
-
-class Langchain:
-    def __init__(self) -> None:
-        # Check if the necessary part of the 'opentelemetry' package is installed
-        if find_spec("opentelemetry.instrumentation.langchain") is None:
-            raise ModuleNotFoundError(
-                "Missing `opentelemetry-instrumentation-langchain` component. Install with `pip install opentelemetry-instrumentation-langchain`."
-            )
-
-    def get(self):
-        return LangchainInstrumentor
ragaai_catalyst/tracers/instrumentators/llamaindex.py (deleted)
@@ -1,14 +0,0 @@
-from importlib.util import find_spec
-
-
-class LlamaIndex:
-    def __init__(self) -> None:
-        if find_spec("llamaindex") is None:
-            raise ModuleNotFoundError(
-                "Missing `llamaindex` package. Install with `pip install llamaindex`."
-            )
-
-    def get(self):
-        from opentelemetry.instrumentation.llamaindex import LlamaIndexInstrumentor
-
-        return LlamaIndexInstrumentor
ragaai_catalyst/tracers/instrumentators/openai.py (deleted)
@@ -1,13 +0,0 @@
-from importlib.util import find_spec
-from opentelemetry.instrumentation.openai import OpenAIInstrumentor
-
-
-class OpenAI:
-    def __init__(self) -> None:
-        if find_spec("openai") is None:
-            raise ModuleNotFoundError(
-                "Missing `openai` package. Install with `pip install openai`."
-            )
-
-    def get(self):
-        return OpenAIInstrumentor
{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info/licenses}/LICENSE
File without changes

{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1.dist-info}/top_level.txt
File without changes