openlit 1.33.2__py3-none-any.whl → 1.33.4__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- openlit/instrumentation/astra/astra.py +1 -1
- openlit/instrumentation/astra/async_astra.py +1 -1
- openlit/instrumentation/langchain/langchain.py +25 -10
- openlit/instrumentation/openai/async_openai.py +2 -1
- openlit/instrumentation/openai/openai.py +2 -1
- {openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/METADATA +4 -3
- {openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/RECORD +9 -9
- {openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/WHEEL +1 -1
- {openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/LICENSE +0 -0
openlit/instrumentation/astra/astra.py
@@ -87,7 +87,7 @@ def general_wrap(gen_ai_endpoint, version, environment, application_name,
                 span.set_attribute(SemanticConvetion.DB_INDEX_DIMENSION,
                                    kwargs.get("dimension", ""))
                 span.set_attribute(SemanticConvetion.DB_INDEX_METRIC,
-                                   kwargs.get("metric", ""))
+                                   str(kwargs.get("metric", "")))
                 span.set_attribute(SemanticConvetion.DB_OPERATION_API_ENDPOINT,
                                    instance.api_endpoint)
 
openlit/instrumentation/astra/async_astra.py
@@ -87,7 +87,7 @@ def general_wrap(gen_ai_endpoint, version, environment, application_name,
                 span.set_attribute(SemanticConvetion.DB_INDEX_DIMENSION,
                                    kwargs.get("dimension", ""))
                 span.set_attribute(SemanticConvetion.DB_INDEX_METRIC,
-                                   kwargs.get("metric", ""))
+                                   str(kwargs.get("metric", "")))
                 span.set_attribute(SemanticConvetion.DB_OPERATION_API_ENDPOINT,
                                    instance.api_endpoint)
 
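Both Astra wrappers now coerce the `metric` keyword to a string before recording it on the span. A minimal sketch of the likely motivation, assuming the caller passes a non-string value such as an enum (the `VectorMetric` class and the attribute key below are illustrative stand-ins, not openlit's own names): OpenTelemetry span attributes only accept `str`, `bool`, `int`, `float`, or sequences of those, so anything else is dropped with an invalid-type warning unless it is stringified first.

```python
from enum import Enum

from opentelemetry import trace

tracer = trace.get_tracer("example")


class VectorMetric(Enum):
    # Stand-in for a non-string value a caller might pass as kwargs["metric"].
    COSINE = "cosine"


kwargs = {"dimension": 1536, "metric": VectorMetric.COSINE}

with tracer.start_as_current_span("astra.create_collection") as span:
    # Without str(), the SDK logs "Invalid type VectorMetric for attribute ..."
    # and silently drops the attribute; with str() it is recorded as text.
    span.set_attribute("db.create_index.metric", str(kwargs.get("metric", "")))
```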
openlit/instrumentation/langchain/langchain.py
@@ -508,11 +508,11 @@ def chat(gen_ai_endpoint, version, environment, application_name,
 
             try:
                 prompt = ""
-                if hasattr(response, '
-                token_usage = response.
-                input_tokens = token_usage.get("
-                output_tokens = token_usage.get("
-                model =
+                if hasattr(response, 'usage_metadata') and response.usage_metadata:
+                    token_usage = response.usage_metadata
+                    input_tokens = token_usage.get("input_tokens", 0)
+                    output_tokens = token_usage.get("output_tokens", 0)
+                    model = instance.model_id
                     prompt = "" if isinstance(args[0], list) else args[0]
                 else:
                     if not isinstance(response, dict) or "output_text" not in response:
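The rewritten branch reads token counts straight from the response's `usage_metadata` instead of the previous lookup (truncated in the removed lines above). A small self-contained sketch of that path, using a stand-in object rather than LangChain itself:

```python
from types import SimpleNamespace

# Stand-in for a LangChain chat result that carries usage_metadata
# ({"input_tokens": ..., "output_tokens": ..., "total_tokens": ...}).
response = SimpleNamespace(
    usage_metadata={"input_tokens": 12, "output_tokens": 34, "total_tokens": 46},
)

if hasattr(response, "usage_metadata") and response.usage_metadata:
    token_usage = response.usage_metadata
    input_tokens = token_usage.get("input_tokens", 0)
    output_tokens = token_usage.get("output_tokens", 0)
    print(input_tokens, output_tokens)  # -> 12 34
```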
openlit/instrumentation/langchain/langchain.py
@@ -661,12 +661,27 @@ def achat(gen_ai_endpoint, version, environment, application_name,
             response = await wrapped(*args, **kwargs)
 
             try:
-
-
-
-
+                prompt = ""
+                if hasattr(response, 'usage_metadata') and response.usage_metadata:
+                    token_usage = response.usage_metadata
+                    input_tokens = token_usage.get("input_tokens", 0)
+                    output_tokens = token_usage.get("output_tokens", 0)
+                    model = instance.model_id
+                    prompt = "" if isinstance(args[0], list) else args[0]
+
+                else:
+                    if not isinstance(response, dict) or "output_text" not in response:
+                        return response
+                    # Fallback: Calculate tokens manually if response_metadata is missing
+                    model = "gpt-4o-mini" # Fallback model
+                    input_texts = [
+                        doc.page_content for doc in response.get("input_documents", [])
+                        if isinstance(doc.page_content, str)
+                    ]
+                    input_tokens = sum(general_tokens(text) for text in input_texts)
+                    output_text = response.get("output_text", "")
+                    output_tokens = general_tokens(output_text)
 
-                prompt = "" if isinstance(args[0], list) else args[0]
                 # Calculate cost of the operation
                 cost = get_chat_model_cost(
                     model,
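When the chain result is a plain dict (for example `{"input_documents": [...], "output_text": "..."}`), the new fallback branch counts tokens itself. A rough, self-contained stand-in for that logic, using `tiktoken` (an openlit dependency per the METADATA below) in place of openlit's `general_tokens` helper:

```python
import tiktoken

enc = tiktoken.get_encoding("cl100k_base")


def count_tokens(text: str) -> int:
    # Rough proxy for general_tokens(): length of the encoded token sequence.
    return len(enc.encode(text))


response = {
    "input_documents": [],  # would normally hold LangChain Document objects
    "output_text": "The answer is 42.",
}

input_texts = []  # doc.page_content strings; empty in this toy example
input_tokens = sum(count_tokens(text) for text in input_texts)
output_tokens = count_tokens(response.get("output_text", ""))
print(input_tokens, output_tokens)
```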
openlit/instrumentation/openai/async_openai.py
@@ -478,11 +478,12 @@ def async_embedding(gen_ai_endpoint, version, environment, application_name,
                                         response_dict.get('usage').get('total_tokens'))
                     span.set_attribute(SemanticConvetion.GEN_AI_USAGE_COST,
                                         cost)
+
                     if trace_content:
                         span.add_event(
                             name=SemanticConvetion.GEN_AI_CONTENT_PROMPT_EVENT,
                             attributes={
-                                SemanticConvetion.GEN_AI_CONTENT_PROMPT: kwargs.get("input", ""),
+                                SemanticConvetion.GEN_AI_CONTENT_PROMPT: str(kwargs.get("input", "")),
                             },
                         )
 
openlit/instrumentation/openai/openai.py
@@ -481,11 +481,12 @@ def embedding(gen_ai_endpoint, version, environment, application_name,
                                         response_dict.get('usage').get('total_tokens'))
                     span.set_attribute(SemanticConvetion.GEN_AI_USAGE_COST,
                                         cost)
+
                     if trace_content:
                         span.add_event(
                             name=SemanticConvetion.GEN_AI_CONTENT_PROMPT_EVENT,
                             attributes={
-                                SemanticConvetion.GEN_AI_CONTENT_PROMPT: kwargs.get("input", ""),
+                                SemanticConvetion.GEN_AI_CONTENT_PROMPT: str(kwargs.get("input", "")),
                             },
                         )
 
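In both embedding wrappers the recorded prompt becomes `str(kwargs.get("input", ""))`. A minimal sketch of why, assuming the caller passes the embeddings `input` as a list (of strings or of token ids): OpenTelemetry event attributes only accept primitives or flat, homogeneous sequences, so a nested or mixed list has to be stringified before it can be attached to the prompt event (the event and attribute names below are illustrative, not openlit's constants).

```python
from opentelemetry import trace

tracer = trace.get_tracer("example")

# The OpenAI embeddings `input` kwarg may be a list of strings or of token-id lists.
inputs = [[1212, 318, 257], [1332, 2420]]

with tracer.start_as_current_span("openai.embeddings") as span:
    span.add_event(
        name="gen_ai.content.prompt",               # illustrative event name
        attributes={"gen_ai.prompt": str(inputs)},  # nested list must be stringified
    )
```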
{openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/METADATA
@@ -1,8 +1,7 @@
-Metadata-Version: 2.
+Metadata-Version: 2.3
 Name: openlit
-Version: 1.33.2
+Version: 1.33.4
 Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
-Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
 Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
 Author: OpenLIT
 Requires-Python: >=3.7.1,<4.0.0
@@ -26,6 +25,7 @@ Requires-Dist: requests (>=2.26.0,<3.0.0)
 Requires-Dist: schedule (>=1.2.2,<2.0.0)
 Requires-Dist: tiktoken (>=0.7.0,<0.8.0)
 Requires-Dist: xmltodict (>=0.13.0,<0.14.0)
+Project-URL: Homepage, https://github.com/openlit/openlit/tree/main/openlit/python
 Project-URL: Repository, https://github.com/openlit/openlit/tree/main/openlit/python
 Description-Content-Type: text/markdown
 
@@ -106,6 +106,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
 - [✅ Dynatrace](https://docs.openlit.io/latest/connections/dynatrace)
 - [✅ OpenObserve](https://docs.openlit.io/latest/connections/openobserve)
 - [✅ Highlight.io](https://docs.openlit.io/latest/connections/highlight)
+- [✅ SigLens](https://docs.openlit.io/latest/connections/siglens)
 
 ## 💿 Installation
 
{openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/RECORD
@@ -23,8 +23,8 @@ openlit/instrumentation/anthropic/async_anthropic.py,sha256=Zz1KRKIG9wGn0quOoLvj
 openlit/instrumentation/assemblyai/__init__.py,sha256=h5AADJkkqZA4IvUZ6rn8P32eVSmD15LgdcPlBX23Ve0,1560
 openlit/instrumentation/assemblyai/assemblyai.py,sha256=muOq9C5JigG8N2Yd90j128LUKz8r-Gb3rOYSAjrarDM,5710
 openlit/instrumentation/astra/__init__.py,sha256=G4alCOR6hXQvUQPDCS8lY1rj0Mz-KmrW5vVWk5loO78,8054
-openlit/instrumentation/astra/astra.py,sha256=
-openlit/instrumentation/astra/async_astra.py,sha256=
+openlit/instrumentation/astra/astra.py,sha256=u-Mh_3dI1_yRDKndI1HX3LNoG5j1J8SBH6143BW_81w,12079
+openlit/instrumentation/astra/async_astra.py,sha256=ZX2jgBJGCFLfkOEv3WFvKLgzCk6Oejm9yK0NLeaG-Sg,12091
 openlit/instrumentation/azure_ai_inference/__init__.py,sha256=Xl_4hjQeXcA-NgkqwTbs1ejPKRRnQXsDErXfFIz0z7U,2699
 openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py,sha256=T3SLSJxwrjOaGGkedB6DT92SCHLWbaJu5YAzZzAeBsk,22748
 openlit/instrumentation/azure_ai_inference/azure_ai_inference.py,sha256=IzwDZ99h7HpOI-NnEkYqOIh2sAm-2aHi4BcTMoXNx1c,22694
@@ -65,7 +65,7 @@ openlit/instrumentation/julep/__init__.py,sha256=oonEVK41P5g4SYRm0E_H4zCVH9NM4aJ
 openlit/instrumentation/julep/async_julep.py,sha256=OO8lIm9uUV1lhPo_klKBVyaDwgHhFJlohTeZItd5qwU,5273
 openlit/instrumentation/julep/julep.py,sha256=lDUmkEP4hXk7vgUUbNRD-mnfdfrZifXSFVVILs8Ttkw,5276
 openlit/instrumentation/langchain/__init__.py,sha256=gVtPZJifx-H8rqdZlU3GXdy3NtRF8yVb7PW7gE-ddJk,3592
-openlit/instrumentation/langchain/langchain.py,sha256=
+openlit/instrumentation/langchain/langchain.py,sha256=za_IE-uW-PNK4L8NH3H8FelPxal58h6_R-YHoKRWf5U,38567
 openlit/instrumentation/letta/__init__.py,sha256=sjjOuMvZ1EPGEluNW0WTuSXYPhrb453cBIizt88Ta3g,2951
 openlit/instrumentation/letta/letta.py,sha256=V_RLcGPy3Y9shxeDEtaDu7dHMnDWA08ijxWAZuQPQWg,8292
 openlit/instrumentation/litellm/__init__.py,sha256=Z-LsVHKJdPganHfJA_rWg7xAfQYkvLfpLdF-eckU4qY,2401
@@ -88,9 +88,9 @@ openlit/instrumentation/ollama/async_ollama.py,sha256=7lbikD-I9k8VL63idqj3VMEfiE
 openlit/instrumentation/ollama/ollama.py,sha256=lBt1d3rFnF1tFbfdOccwjEafHnmTAUGsiOKSHku6Fkw,31277
 openlit/instrumentation/openai/__init__.py,sha256=AZ2cPr3TMKkgGdMl_yXMeSi7bWhtmMqOW1iHdzHHGHA,16265
 openlit/instrumentation/openai/async_azure_openai.py,sha256=XbST1UE_zXzNL6RX2XwCsK_a6IhG9PHVTMKBjGrUcB0,48961
-openlit/instrumentation/openai/async_openai.py,sha256=
+openlit/instrumentation/openai/async_openai.py,sha256=HSOKP6zFzfjxVx-prWCCD7ihCNAdBB7SabtaMnUGDGk,50182
 openlit/instrumentation/openai/azure_openai.py,sha256=dZUc5MtCwg_sZJWiruG6exYGhPAm-339sqs3sKZNRPU,48761
-openlit/instrumentation/openai/openai.py,sha256=
+openlit/instrumentation/openai/openai.py,sha256=Bm7BgsIgjGcQLCPpbBt_YhBgsheZkoBeZxLZIY-jgC4,50268
 openlit/instrumentation/phidata/__init__.py,sha256=rfPCXYOIsJbxChee2p269UzkJ1Z-pvQbii7Fgrw1v2g,1527
 openlit/instrumentation/phidata/phidata.py,sha256=9Aza2bLgeq688Ahyy7ekbxpSh4RTD7FFKtLmv4TNbrw,4667
 openlit/instrumentation/pinecone/__init__.py,sha256=Mv9bElqNs07_JQkYyNnO0wOM3hdbprmw7sttdMeKC7g,2526
@@ -116,7 +116,7 @@ openlit/instrumentation/vllm/vllm.py,sha256=lDzM7F5pgxvh8nKL0dcKB4TD0Mc9wXOWeXOs
 openlit/otel/metrics.py,sha256=y7SQDTyfLakMrz0V4DThN-WAeap7YZzyndeYGSP6nVg,4516
 openlit/otel/tracing.py,sha256=fG3vl-flSZ30whCi7rrG25PlkIhhr8PhnfJYCkZzCD0,3895
 openlit/semcov/__init__.py,sha256=_kxniPeCdAYC_ZK982gqDR6RwgFCIK8xUPCzotwtt0k,10975
-openlit-1.33.
-openlit-1.33.
-openlit-1.33.
-openlit-1.33.
+openlit-1.33.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+openlit-1.33.4.dist-info/METADATA,sha256=rc1Qerq-paycg38nK1I-NKvSFPizGA2fpt5DNqGRHjQ,23073
+openlit-1.33.4.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+openlit-1.33.4.dist-info/RECORD,,
{openlit-1.33.2.dist-info → openlit-1.33.4.dist-info}/LICENSE: file without changes