ragaai-catalyst 2.1.6b1__py3-none-any.whl → 2.1.6.1b0__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- ragaai_catalyst/guard_executor.py +1 -0
- ragaai_catalyst/tracers/instrumentators/__init__.py +0 -5
- ragaai_catalyst/tracers/tracer.py +21 -6
- ragaai_catalyst/tracers/upload_traces.py +5 -2
- ragaai_catalyst/tracers/utils/trace_json_converter.py +39 -24
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/METADATA +5 -4
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/RECORD +10 -13
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/WHEEL +1 -1
- ragaai_catalyst/tracers/instrumentators/langchain.py +0 -14
- ragaai_catalyst/tracers/instrumentators/llamaindex.py +0 -14
- ragaai_catalyst/tracers/instrumentators/openai.py +0 -13
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info/licenses}/LICENSE +0 -0
- {ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/top_level.txt +0 -0
ragaai_catalyst/guard_executor.py
@@ -164,6 +164,7 @@ class GuardExecutor:
         return doc
 
     def execute_input_guardrails(self, prompt, prompt_params):
+        self.current_trace_id =None
        doc = self.set_variables(prompt,prompt_params)
        deployment_response = self.execute_deployment(self.input_deployment_id,doc)
        self.current_trace_id = deployment_response['data']['results'][0]['executionId']
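The single added line resets the executor's trace state at the start of each input-guardrail run, so an id left over from a previous call cannot survive a run whose deployment call fails. A minimal standalone sketch of the pattern; the class below is an illustrative stand-in, not the library's GuardExecutor:

# Illustrative stand-in showing why the reset matters.
class _Executor:
    def __init__(self):
        self.current_trace_id = "stale-id-from-previous-run"

    def execute_input_guardrails(self, prompt, prompt_params):
        self.current_trace_id = None                   # the line added in this release
        raise RuntimeError("deployment call failed")   # simulate a failed run

executor = _Executor()
try:
    executor.execute_input_guardrails("Summarize {doc}", {"doc": "..."})
except RuntimeError:
    pass
assert executor.current_trace_id is None  # stale id no longer survives a failure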
ragaai_catalyst/tracers/tracer.py
@@ -21,11 +21,6 @@ from opentelemetry.sdk import trace as trace_sdk
 from opentelemetry.sdk.trace.export import SimpleSpanProcessor
 from ragaai_catalyst.tracers.exporters.file_span_exporter import FileSpanExporter
 from ragaai_catalyst.tracers.exporters.raga_exporter import RagaExporter
-from ragaai_catalyst.tracers.instrumentators import (
-    LangchainInstrumentor,
-    OpenAIInstrumentor,
-    LlamaIndexInstrumentor,
-)
 from ragaai_catalyst.tracers.utils import get_unique_key
 # from ragaai_catalyst.tracers.llamaindex_callback import LlamaIndexTracer
 from ragaai_catalyst.tracers.llamaindex_instrumentation import LlamaIndexInstrumentationTracer
@@ -283,6 +278,14 @@ class Tracer(AgenticTracing):
                 logger.info("Instrumenting Smolagents...")
             except (ImportError, ModuleNotFoundError):
                 logger.debug("Smolagents not available in environment")
+
+            # OpenAI Agents
+            try:
+                from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
+                instrumentors.append((OpenAIAgentsInstrumentor, []))
+                logger.info("Instrumenting OpenAI Agents...")
+            except (ImportError, ModuleNotFoundError):
+                logger.debug("OpenAI Agents not available in environment")
 
             if not instrumentors:
                 logger.warning("No agentic packages found in environment to instrument")
@@ -298,7 +301,7 @@ class Tracer(AgenticTracing):
         elif tracer_type == "agentic/llamaindex":
             from openinference.instrumentation.llama_index import LlamaIndexInstrumentor
             instrumentors += [(LlamaIndexInstrumentor, [])]
-
+
         elif tracer_type == "agentic/langchain" or tracer_type == "agentic/langgraph":
             from openinference.instrumentation.langchain import LangChainInstrumentor
             instrumentors += [(LangChainInstrumentor, [])]
@@ -319,6 +322,10 @@ class Tracer(AgenticTracing):
         elif tracer_type == "agentic/smolagents":
             from openinference.instrumentation.smolagents import SmolagentsInstrumentor
             instrumentors += [(SmolagentsInstrumentor, [])]
+
+        elif tracer_type == "agentic/openai_agents":
+            from openinference.instrumentation.openai_agents import OpenAIAgentsInstrumentor
+            instrumentors += [(OpenAIAgentsInstrumentor, [])]
 
         else:
             # Unknown agentic tracer type
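Taken together, the tracer.py hunks above add OpenAI Agents support both to the automatic instrumentation path and as an explicit tracer type. A hedged sketch of how a caller might opt in; only the tracer_type value "agentic/openai_agents" is confirmed by this diff, the other constructor arguments are illustrative placeholders:

from ragaai_catalyst import RagaAICatalyst, Tracer

# Illustrative setup: credentials and project/dataset names are placeholders.
catalyst = RagaAICatalyst(access_key="...", secret_key="...")
tracer = Tracer(
    project_name="my-project",
    dataset_name="my-dataset",
    tracer_type="agentic/openai_agents",  # new tracer type added in this release
)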
@@ -518,6 +525,14 @@ class Tracer(AgenticTracing):
             combined_metadata.update(user_detail['trace_user_detail']['metadata'])
         if additional_metadata:
             combined_metadata.update(additional_metadata)
+
+        model_cost_latency_metadata = {}
+        if additional_metadata:
+            model_cost_latency_metadata["model"] = additional_metadata["model_name"]
+            model_cost_latency_metadata["total_cost"] = additional_metadata["cost"]
+            model_cost_latency_metadata["total_latency"] = additional_metadata["latency"]
+            model_cost_latency_metadata["recorded_on"] = datetime.datetime.now().astimezone().isoformat()
+            combined_metadata.update(model_cost_latency_metadata)
 
         langchain_traces = langchain_tracer_extraction(data, self.user_context)
         final_result = convert_langchain_callbacks_output(langchain_traces)
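This hunk lifts the model name, cost, and latency out of additional_metadata and records them in the combined trace metadata under new keys, stamped with the current local time. A small sketch of the resulting dictionary; the input values are invented for illustration:

import datetime

additional_metadata = {"model_name": "gpt-4o-mini", "cost": 0.0021, "latency": 1.8}

model_cost_latency_metadata = {
    "model": additional_metadata["model_name"],
    "total_cost": additional_metadata["cost"],
    "total_latency": additional_metadata["latency"],
    # astimezone() attaches the local UTC offset, e.g. "2025-05-01T10:15:30.123456+05:30"
    "recorded_on": datetime.datetime.now().astimezone().isoformat(),
}
print(model_cost_latency_metadata)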
ragaai_catalyst/tracers/upload_traces.py
@@ -28,11 +28,14 @@ class UploadTraces:
             "response":{"columnType": "response"},
             "context": {"columnType": "context"},
             "llm_model": {"columnType":"pipeline"},
-            "recorded_on": {"columnType": "
+            "recorded_on": {"columnType": "timestamp"},
             "embed_model": {"columnType":"pipeline"},
             "log_source": {"columnType": "metadata"},
             "vector_store":{"columnType":"pipeline"},
-            "feedback": {"columnType":"feedBack"}
+            "feedback": {"columnType":"feedBack"},
+            "model": {"columnType": "metadata"},
+            "total_cost": {"columnType": "metadata", "dataType": "numerical"},
+            "total_latency": {"columnType": "metadata", "dataType": "numerical"},
         }
 
         if additional_metadata_keys:
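The three new columns mirror the model, total_cost, and total_latency keys that the tracer.py hunk above now writes into each trace's metadata, so uploaded datasets can surface per-trace cost and latency. An illustrative row shape under that assumption; the values and the surrounding upload machinery are not taken from this diff:

# Illustrative only: how a row with the new metadata columns might look.
row = {
    "prompt": "What is RAG?",
    "response": "...",
    "llm_model": "gpt-4o-mini",
    "model": "gpt-4o-mini",  # columnType: metadata
    "total_cost": 0.0021,    # columnType: metadata, dataType: numerical
    "total_latency": 1.8,    # columnType: metadata, dataType: numerical
}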
ragaai_catalyst/tracers/utils/trace_json_converter.py
@@ -226,7 +226,7 @@ def get_spans(input_trace, custom_model_cost):
 
 def convert_json_format(input_trace, custom_model_cost):
     """
-    Converts a JSON from one format to UI format.
+    Converts a JSON from one format to UI format, handling nested spans.
 
     Args:
         input_trace (str): The input JSON string.
@@ -238,8 +238,8 @@ def convert_json_format(input_trace, custom_model_cost):
         "id": input_trace[0]["context"]["trace_id"],
         "trace_name": "",
         "project_name": "",
-        "start_time": convert_time_format(min(item["start_time"] for item in input_trace)),
-        "end_time": convert_time_format(max(item["end_time"] for item in input_trace))
+        "start_time": convert_time_format(min(item["start_time"] for item in input_trace)),
+        "end_time": convert_time_format(max(item["end_time"] for item in input_trace))
     }
     final_trace["metadata"] = {
         "tokens": {
@@ -253,31 +253,46 @@ def convert_json_format(input_trace, custom_model_cost):
             "total_cost": 0.0
         }
     }
-    final_trace["replays"]={"source":None}
-    final_trace["data"]=[{}]
+    final_trace["replays"] = {"source": None}
+    final_trace["data"] = [{}]
+    final_trace["network_calls"] = []
+    final_trace["interactions"] = []
+
+    # import pdb; pdb.set_trace()
+
+    # Helper to recursively extract cost/token info from all spans
+    def accumulate_metrics(span):
+        if span["type"] == "llm" and "info" in span:
+            info = span["info"]
+            cost = info.get("cost", {})
+            tokens = info.get("tokens", {})
+
+            final_trace["metadata"]["tokens"]["prompt_tokens"] += tokens.get("prompt_tokens", 0.0)
+            final_trace["metadata"]["tokens"]["completion_tokens"] += tokens.get("completion_tokens", 0.0)
+            final_trace["metadata"]["tokens"]["total_tokens"] += tokens.get("total_tokens", 0.0)
+
+            final_trace["metadata"]["cost"]["input_cost"] += cost.get("input_cost", 0.0)
+            final_trace["metadata"]["cost"]["output_cost"] += cost.get("output_cost", 0.0)
+            final_trace["metadata"]["cost"]["total_cost"] += cost.get("total_cost", 0.0)
+
+        # Recursively process children
+        children = span.get("data", {}).get("children", [])
+        for child in children:
+            accumulate_metrics(child)
+
+    # Extract and attach spans
     try:
-
+        spans = get_spans(input_trace, custom_model_cost)
+        final_trace["data"][0]["spans"] = spans
+
+        # Accumulate from root spans and their children
+        for span in spans:
+            accumulate_metrics(span)
     except Exception as e:
         raise Exception(f"Error in get_spans function: {e}")
-    final_trace["network_calls"] =[]
-    final_trace["interactions"] = []
-
-    for itr in final_trace["data"][0]["spans"]:
-        if itr["type"]=="llm":
-            if "tokens" in itr["info"]:
-                if "prompt_tokens" in itr["info"]["tokens"]:
-                    final_trace["metadata"]["tokens"]["prompt_tokens"] += itr["info"]["tokens"].get('prompt_tokens', 0.0)
-                    final_trace["metadata"]["cost"]["input_cost"] += itr["info"]["cost"].get('input_cost', 0.0)
-                if "completion_tokens" in itr["info"]["tokens"]:
-                    final_trace["metadata"]["tokens"]["completion_tokens"] += itr["info"]["tokens"].get('completion_tokens', 0.0)
-                    final_trace["metadata"]["cost"]["output_cost"] += itr["info"]["cost"].get('output_cost', 0.0)
-            if "tokens" in itr["info"]:
-                if "total_tokens" in itr["info"]["tokens"]:
-                    final_trace["metadata"]["tokens"]["total_tokens"] += itr["info"]["tokens"].get('total_tokens', 0.0)
-                    final_trace["metadata"]["cost"]["total_cost"] += itr["info"]["cost"].get('total_cost', 0.0)
 
-    #
-    final_trace["metadata"]["total_cost"] = final_trace["metadata"]["cost"]["total_cost"]
+    # Total metadata summary
+    final_trace["metadata"]["total_cost"] = final_trace["metadata"]["cost"]["total_cost"]
     final_trace["metadata"]["total_tokens"] = final_trace["metadata"]["tokens"]["total_tokens"]
 
     return final_trace
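The practical effect of this rewrite is that token and cost totals are now accumulated recursively through each span's data.children, so LLM calls nested inside agent or tool spans are counted, whereas the old flat loop only saw top-level spans. A self-contained sketch of the same recursion pattern; the span dictionaries are invented, only the type, info, and data.children shapes come from the diff:

# Standalone illustration of the recursive accumulation added in this version.
totals = {"total_tokens": 0.0, "total_cost": 0.0}

def accumulate(span):
    if span["type"] == "llm" and "info" in span:
        totals["total_tokens"] += span["info"].get("tokens", {}).get("total_tokens", 0.0)
        totals["total_cost"] += span["info"].get("cost", {}).get("total_cost", 0.0)
    for child in span.get("data", {}).get("children", []):
        accumulate(child)

spans = [
    # An agent span whose nested LLM call would be missed by a flat loop.
    {"type": "agent", "data": {"children": [
        {"type": "llm", "info": {"tokens": {"total_tokens": 120}, "cost": {"total_cost": 0.5}}, "data": {}},
    ]}},
    {"type": "llm", "info": {"tokens": {"total_tokens": 80}, "cost": {"total_cost": 0.25}}, "data": {}},
]
for s in spans:
    accumulate(s)
print(totals)  # {'total_tokens': 200.0, 'total_cost': 0.75}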
{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/METADATA
@@ -1,9 +1,9 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: ragaai_catalyst
-Version: 2.1.
+Version: 2.1.6.1b0
 Summary: RAGA AI CATALYST
 Author-email: Kiran Scaria <kiran.scaria@raga.ai>, Kedar Gaikwad <kedar.gaikwad@raga.ai>, Dushyant Mahajan <dushyant.mahajan@raga.ai>, Siddhartha Kosti <siddhartha.kosti@raga.ai>, Ritika Goel <ritika.goel@raga.ai>, Vijay Chaurasia <vijay.chaurasia@raga.ai>, Tushar Kumar <tushar.kumar@raga.ai>
-Requires-Python: <3.13,>=3.
+Requires-Python: <3.13,>=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: aiohttp>=3.10.2
@@ -39,7 +39,7 @@ Requires-Dist: openinference-instrumentation-openai
 Requires-Dist: openinference-instrumentation-bedrock
 Requires-Dist: openinference-instrumentation-crewai
 Requires-Dist: openinference-instrumentation-haystack
-Requires-Dist: openinference-instrumentation-
+Requires-Dist: openinference-instrumentation-openai-agents
 Requires-Dist: openinference-instrumentation-smolagents
 Requires-Dist: opentelemetry-sdk
 Requires-Dist: opentelemetry-exporter-otlp
@@ -51,6 +51,7 @@ Requires-Dist: black; extra == "dev"
 Requires-Dist: isort; extra == "dev"
 Requires-Dist: mypy; extra == "dev"
 Requires-Dist: flake8; extra == "dev"
+Dynamic: license-file
 
 # RagaAI Catalyst
 
{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/RECORD
@@ -3,7 +3,7 @@ ragaai_catalyst/_version.py,sha256=JKt9KaVNOMVeGs8ojO6LvIZr7ZkMzNN-gCcvryy4x8E,4
 ragaai_catalyst/dataset.py,sha256=YCj8Ovu6y38KEw-1HCe4xQWkmYPgfNTtMa8Q0g6B62o,29401
 ragaai_catalyst/evaluation.py,sha256=O96CydYVPh3duUmXjY6REIXMOR-tOPixSG-Qhrf636A,22955
 ragaai_catalyst/experiment.py,sha256=8yQo1phCHlpnJ-4CqCaIbLXg_1ZlAuLGI9kqGBl-OTE,18859
-ragaai_catalyst/guard_executor.py,sha256=
+ragaai_catalyst/guard_executor.py,sha256=f2FXQSW17z4-eor61J_mtD0z-xBm9yordq8giB-GN_U,14006
 ragaai_catalyst/guardrails_manager.py,sha256=_VrARJ1udmCF8TklNKy7XTQUaM8ATDhTOAGDonBkFro,14245
 ragaai_catalyst/internal_api_completion.py,sha256=DdICI5yfEudiOAIC8L4oxH0Qz7kX-BZCdo9IWsi2gNo,2965
 ragaai_catalyst/prompt_manager.py,sha256=W8ypramzOprrJ7-22d5vkBXIuIQ8v9XAzKDGxKsTK28,16550
@@ -31,8 +31,8 @@ ragaai_catalyst/tracers/distributed.py,sha256=MwlBwIxCAng-OI-7Ove_rkE1mTLeuW4Jw-
 ragaai_catalyst/tracers/langchain_callback.py,sha256=CB75zzG3-DkYTELj0vI1MOHQTY0MuQJfoHIXz9Cl8S8,34568
 ragaai_catalyst/tracers/llamaindex_callback.py,sha256=ZY0BJrrlz-P9Mg2dX-ZkVKG3gSvzwqBtk7JL_05MiYA,14028
 ragaai_catalyst/tracers/llamaindex_instrumentation.py,sha256=Ys_jLkvVqo12bKgXDmkp4TxJu9HkBATrFE8cIcTYxWw,14329
-ragaai_catalyst/tracers/tracer.py,sha256=
-ragaai_catalyst/tracers/upload_traces.py,sha256=
+ragaai_catalyst/tracers/tracer.py,sha256=juMsA9qkKffoSWYx3a-iTNCY6SnKxjMoYvTSGQ-r-Gk,37080
+ragaai_catalyst/tracers/upload_traces.py,sha256=nqH6Ldng33VhEGcvQberyWKZ1WvLuBRoySEit8b0f7s,5882
 ragaai_catalyst/tracers/agentic_tracing/README.md,sha256=X4QwLb7-Jg7GQMIXj-SerZIgDETfw-7VgYlczOR8ZeQ,4508
 ragaai_catalyst/tracers/agentic_tracing/__init__.py,sha256=yf6SKvOPSpH-9LiKaoLKXwqj5sez8F_5wkOb91yp0oE,260
 ragaai_catalyst/tracers/agentic_tracing/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -78,20 +78,17 @@ ragaai_catalyst/tracers/exporters/dynamic_trace_exporter.py,sha256=w9U8UTxvTbGTD
 ragaai_catalyst/tracers/exporters/file_span_exporter.py,sha256=RgGteu-NVGprXKkynvyIO5yOjpbtA41R3W_NzCjnkwE,6445
 ragaai_catalyst/tracers/exporters/raga_exporter.py,sha256=6xvjWXyh8XPkHKSLLmAZUQSvwuyY17ov8pv2VdfI0qA,17875
 ragaai_catalyst/tracers/exporters/ragaai_trace_exporter.py,sha256=HZG1UjcipgQOHkeqQHVGxenIab2mHqcVmWqtOXlMt6Q,5305
-ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=
-ragaai_catalyst/tracers/instrumentators/langchain.py,sha256=yMN0qVF0pUVk6R5M1vJoUXezDo1ejs4klCFRlE8x4vE,574
-ragaai_catalyst/tracers/instrumentators/llamaindex.py,sha256=SMrRlR4xM7k9HK43hakE8rkrWHxMlmtmWD-AX6TeByc,416
-ragaai_catalyst/tracers/instrumentators/openai.py,sha256=14R4KW9wQCR1xysLfsP_nxS7cqXrTPoD8En4MBAaZUU,379
+ragaai_catalyst/tracers/instrumentators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 ragaai_catalyst/tracers/utils/__init__.py,sha256=KeMaZtYaTojilpLv65qH08QmpYclfpacDA0U3wg6Ybw,64
 ragaai_catalyst/tracers/utils/convert_langchain_callbacks_output.py,sha256=ofrNrxf2b1hpjDh_zeaxiYq86azn1MF3kW8-ViYPEg0,1641
 ragaai_catalyst/tracers/utils/convert_llama_instru_callback.py,sha256=8qLo7x4Zsn3dhJfSv9gviB60YXZ2TOsWEouucJmBM0c,1724
 ragaai_catalyst/tracers/utils/extraction_logic_llama_index.py,sha256=ZhPs0YhVtB82-Pq9o1BvCinKE_WPvVxPTEcZjlJbFYM,2371
 ragaai_catalyst/tracers/utils/langchain_tracer_extraction_logic.py,sha256=XS2_x2qneqEx9oAighLg-LRiueWcESLwIC2r7eJT-Ww,3117
 ragaai_catalyst/tracers/utils/model_prices_and_context_window_backup.json,sha256=C3uwkibJ08C9sOX-54kulZYmJlIpZ-SQpfE6HNGrjbM,343502
-ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=
+ragaai_catalyst/tracers/utils/trace_json_converter.py,sha256=RH9dkCnPaSygvPvAilRE4lUdUaRtALJKH85E4jHcVoM,14072
 ragaai_catalyst/tracers/utils/utils.py,sha256=ViygfJ7vZ7U0CTSA1lbxVloHp4NSlmfDzBRNCJuMhis,2374
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
-ragaai_catalyst-2.1.
+ragaai_catalyst-2.1.6.1b0.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ragaai_catalyst-2.1.6.1b0.dist-info/METADATA,sha256=taTMHbwPGhj24OzzdWn_pNo03EFeqgi90NzeOWVNVUM,22141
+ragaai_catalyst-2.1.6.1b0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ragaai_catalyst-2.1.6.1b0.dist-info/top_level.txt,sha256=HpgsdRgEJMk8nqrU6qdCYk3di7MJkDL0B19lkc7dLfM,16
+ragaai_catalyst-2.1.6.1b0.dist-info/RECORD,,
ragaai_catalyst/tracers/instrumentators/langchain.py (file removed)
@@ -1,14 +0,0 @@
-from importlib.util import find_spec
-from opentelemetry.instrumentation.langchain import LangchainInstrumentor
-
-
-class Langchain:
-    def __init__(self) -> None:
-        # Check if the necessary part of the 'opentelemetry' package is installed
-        if find_spec("opentelemetry.instrumentation.langchain") is None:
-            raise ModuleNotFoundError(
-                "Missing `opentelemetry-instrumentation-langchain` component. Install with `pip install opentelemetry-instrumentation-langchain`."
-            )
-
-    def get(self):
-        return LangchainInstrumentor
ragaai_catalyst/tracers/instrumentators/llamaindex.py (file removed)
@@ -1,14 +0,0 @@
-from importlib.util import find_spec
-
-
-class LlamaIndex:
-    def __init__(self) -> None:
-        if find_spec("llamaindex") is None:
-            raise ModuleNotFoundError(
-                "Missing `llamaindex` package. Install with `pip install llamaindex`."
-            )
-
-    def get(self):
-        from opentelemetry.instrumentation.llamaindex import LlamaIndexInstrumentor
-
-        return LlamaIndexInstrumentor
ragaai_catalyst/tracers/instrumentators/openai.py (file removed)
@@ -1,13 +0,0 @@
-from importlib.util import find_spec
-from opentelemetry.instrumentation.openai import OpenAIInstrumentor
-
-
-class OpenAI:
-    def __init__(self) -> None:
-        if find_spec("openai") is None:
-            raise ModuleNotFoundError(
-                "Missing `openai` package. Install with `pip install openai`."
-            )
-
-    def get(self):
-        return OpenAIInstrumentor
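These wrapper classes are removed along with their imports in tracer.py (the @@ -21,11 +21,6 hunk above); instrumentation now goes through the openinference instrumentors directly. A hedged sketch of the equivalent direct usage, assuming the relevant openinference package is installed; the provider wiring is illustrative and not taken from this diff:

from opentelemetry.sdk.trace import TracerProvider
from openinference.instrumentation.langchain import LangChainInstrumentor

provider = TracerProvider()
# Standard OpenTelemetry BaseInstrumentor API: instrument() patches the target
# library so its calls emit spans on the given provider.
LangChainInstrumentor().instrument(tracer_provider=provider)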
{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info/licenses}/LICENSE: file without changes
{ragaai_catalyst-2.1.6b1.dist-info → ragaai_catalyst-2.1.6.1b0.dist-info}/top_level.txt: file without changes