openlit 1.6.0__tar.gz → 1.8.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {openlit-1.6.0 → openlit-1.8.0}/PKG-INFO +4 -2
- {openlit-1.6.0 → openlit-1.8.0}/README.md +3 -1
- {openlit-1.6.0 → openlit-1.8.0}/pyproject.toml +1 -1
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/__helpers.py +30 -5
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/__init__.py +10 -4
- openlit-1.8.0/src/openlit/instrumentation/milvus/__init__.py +94 -0
- openlit-1.8.0/src/openlit/instrumentation/milvus/milvus.py +179 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/semcov/__init__.py +4 -1
- {openlit-1.6.0 → openlit-1.8.0}/LICENSE +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/bedrock/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/bedrock/bedrock.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/chroma/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/chroma/chroma.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/cohere/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/cohere/cohere.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/groq/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/groq/async_groq.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/groq/groq.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/haystack/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/haystack/haystack.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/langchain/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/langchain/langchain.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/llamaindex/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/llamaindex/llamaindex.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/mistral/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/mistral/mistral.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/ollama/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/ollama/async_ollama.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/ollama/ollama.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/openai/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/openai/async_openai.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/openai/openai.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/qdrant/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/qdrant/qdrant.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/transformers/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/transformers/transformers.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/vertexai/__init__.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/vertexai/async_vertexai.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/instrumentation/vertexai/vertexai.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/otel/metrics.py +0 -0
- {openlit-1.6.0 → openlit-1.8.0}/src/openlit/otel/tracing.py +0 -0

{openlit-1.6.0 → openlit-1.8.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: openlit
-Version: 1.6.0
+Version: 1.8.0
 Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications, facilitating the integration of observability into your GenAI-driven projects
 Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
 Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT
@@ -64,6 +64,7 @@ This project adheres to the [Semantic Conventions](https://github.com/open-telem
 - [✅ ChromaDB](https://docs.openlit.io/latest/integrations/chromadb)
 - [✅ Pinecone](https://docs.openlit.io/latest/integrations/pinecone)
 - [✅ Qdrant](https://docs.openlit.io/latest/integrations/qdrant)
+- [✅ Milvus](https://docs.openlit.io/latest/integrations/milvus)
 
 ### Frameworks
 - [✅ Langchain](https://docs.openlit.io/latest/integrations/langchain)
@@ -164,6 +165,7 @@ Below is a detailed overview of the configuration options available, allowing yo
 | `trace_content` | Enables tracing of content for deeper insights. | `True` | No |
 | `disabled_instrumentors`| List of instrumentors to disable. Choices: `["openai", "anthropic", "langchain", "cohere", "mistral", "transformers", "chroma", "pinecone"]`. | `None` | No |
 | `disable_metrics` | If set, disables the collection of metrics. | `False` | No |
+| `pricing_json` | URL or file path of the pricing JSON file. | `https://github.com/openlit/openlit/blob/main/assets/pricing.json` | No |
 
 ## 🌱 Contributing
 
@@ -182,5 +184,5 @@ Connect with the OpenLIT community and maintainers for support, discussions, and
 - 🌟 If you like it, Leave a star on our [GitHub](https://github.com/openlit/openlit/)
 - 🌍 Join our [Slack](https://join.slack.com/t/openlit/shared_invite/zt-2etnfttwg-TjP_7BZXfYg84oAukY8QRQ) Community for live interactions and questions.
 - 🐞 Report bugs on our [GitHub Issues](https://github.com/openlit/openlit/issues) to help us improve OpenLIT.
-- 𝕏 Follow us on [X](https://
+- 𝕏 Follow us on [X](https://x.com/openlit_io) for the latest updates and news.
 

{openlit-1.6.0 → openlit-1.8.0}/README.md

@@ -39,6 +39,7 @@ This project adheres to the [Semantic Conventions](https://github.com/open-telem
 - [✅ ChromaDB](https://docs.openlit.io/latest/integrations/chromadb)
 - [✅ Pinecone](https://docs.openlit.io/latest/integrations/pinecone)
 - [✅ Qdrant](https://docs.openlit.io/latest/integrations/qdrant)
+- [✅ Milvus](https://docs.openlit.io/latest/integrations/milvus)
 
 ### Frameworks
 - [✅ Langchain](https://docs.openlit.io/latest/integrations/langchain)
@@ -139,6 +140,7 @@ Below is a detailed overview of the configuration options available, allowing yo
 | `trace_content` | Enables tracing of content for deeper insights. | `True` | No |
 | `disabled_instrumentors`| List of instrumentors to disable. Choices: `["openai", "anthropic", "langchain", "cohere", "mistral", "transformers", "chroma", "pinecone"]`. | `None` | No |
 | `disable_metrics` | If set, disables the collection of metrics. | `False` | No |
+| `pricing_json` | URL or file path of the pricing JSON file. | `https://github.com/openlit/openlit/blob/main/assets/pricing.json` | No |
 
 ## 🌱 Contributing
 
@@ -157,4 +159,4 @@ Connect with the OpenLIT community and maintainers for support, discussions, and
 - 🌟 If you like it, Leave a star on our [GitHub](https://github.com/openlit/openlit/)
 - 🌍 Join our [Slack](https://join.slack.com/t/openlit/shared_invite/zt-2etnfttwg-TjP_7BZXfYg84oAukY8QRQ) Community for live interactions and questions.
 - 🐞 Report bugs on our [GitHub Issues](https://github.com/openlit/openlit/issues) to help us improve OpenLIT.
-- 𝕏 Follow us on [X](https://
+- 𝕏 Follow us on [X](https://x.com/openlit_io) for the latest updates and news.
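
The `pricing_json` row added to the configuration table above maps to a new keyword argument on `openlit.init`. A minimal sketch of how it would be used, assuming a hypothetical local `./pricing.json` that follows the same structure as the default asset file:

```python
import openlit

# Sketch only: "./pricing.json" is a hypothetical file mirroring the structure of
# https://raw.githubusercontent.com/openlit/openlit/main/assets/pricing.json
openlit.init(
    application_name="demo-app",
    environment="development",
    pricing_json="./pricing.json",  # an http(s) URL is also accepted
)
```

If the argument is omitted, cost calculation falls back to the pricing file hosted in the openlit GitHub repository.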

{openlit-1.6.0 → openlit-1.8.0}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "openlit"
-version = "1.6.0"
+version = "1.8.0"
 description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications, facilitating the integration of observability into your GenAI-driven projects"
 authors = ["OpenLIT"]
 repository = "https://github.com/openlit/openlit/tree/main/openlit/python"

{openlit-1.6.0 → openlit-1.8.0}/src/openlit/__helpers.py

@@ -2,8 +2,9 @@
 """
 This module has functions to calculate model costs based on tokens and to fetch pricing information.
 """
-
+import json
 import logging
+from urllib.parse import urlparse
 import requests
 import tiktoken
 from opentelemetry.trace import Status, StatusCode
@@ -122,11 +123,35 @@ def get_audio_model_cost(model, pricing_info, prompt):
     cost = 0
     return cost
 
-def fetch_pricing_info():
-    """
-
+def fetch_pricing_info(pricing_json=None):
+    """
+    Fetches pricing information from a specified URL or File Path.
+
+    Args:
+        pricing_json(str): path or url to the pricing json file
+
+    Returns:
+        dict: The pricing json
+    """
+    if pricing_json:
+        is_url = urlparse(pricing_json).scheme != ""
+        if is_url:
+            pricing_url = pricing_json
+        else:
+            try:
+                with open(pricing_json, mode='r', encoding='utf-8') as f:
+                    return json.load(f)
+            except FileNotFoundError:
+                logger.error("Pricing information file not found: %s", pricing_json)
+            except json.JSONDecodeError:
+                logger.error("Error decoding JSON from file: %s", pricing_json)
+            except Exception as file_err:
+                logger.error("Unexpected error occurred while reading file: %s", file_err)
+            return {}
+    else:
+        pricing_url = "https://raw.githubusercontent.com/openlit/openlit/main/assets/pricing.json"
     try:
-
+        # Set a timeout of 10 seconds for both the connection and the read
         response = requests.get(pricing_url, timeout=20)
         response.raise_for_status()
         return response.json()
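
The reworked helper decides between a URL, a local file, and the default GitHub asset based on whether `urlparse` finds a scheme. A hedged sketch of the three code paths (the helper is internal and normally invoked through `openlit.init`; `./my_pricing.json` is a hypothetical file):

```python
from openlit.__helpers import fetch_pricing_info

# No argument: fetches the default pricing.json from the openlit GitHub repository.
default_pricing = fetch_pricing_info()

# URL argument: urlparse(...).scheme is non-empty, so it is retrieved over HTTP(S).
remote_pricing = fetch_pricing_info(
    "https://raw.githubusercontent.com/openlit/openlit/main/assets/pricing.json")

# Bare path argument: read from disk; returns {} and logs an error if the file
# is missing or contains invalid JSON.
local_pricing = fetch_pricing_info("./my_pricing.json")
```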

{openlit-1.6.0 → openlit-1.8.0}/src/openlit/__init__.py

@@ -27,6 +27,7 @@ from openlit.instrumentation.haystack import HaystackInstrumentor
 from openlit.instrumentation.chroma import ChromaInstrumentor
 from openlit.instrumentation.pinecone import PineconeInstrumentor
 from openlit.instrumentation.qdrant import QdrantInstrumentor
+from openlit.instrumentation.milvus import MilvusInstrumentor
 from openlit.instrumentation.transformers import TransformersInstrumentor
 
 # Set up logging for error and information messages.
@@ -74,7 +75,8 @@ class OpenlitConfig:
 
     @classmethod
     def update_config(cls, environment, application_name, tracer, otlp_endpoint,
-                      otlp_headers, disable_batch, trace_content, metrics_dict,
+                      otlp_headers, disable_batch, trace_content, metrics_dict,
+                      disable_metrics, pricing_json):
         """
         Updates the configuration based on provided parameters.
 
@@ -87,10 +89,11 @@ class OpenlitConfig:
             otlp_headers (Dict[str, str]): OTLP headers.
             disable_batch (bool): Disable batch span processing flag.
             trace_content (bool): Enable or disable content tracing.
+            pricing_json(str): path or url to the pricing json file
         """
         cls.environment = environment
         cls.application_name = application_name
-        cls.pricing_info = fetch_pricing_info()
+        cls.pricing_info = fetch_pricing_info(pricing_json)
         cls.tracer = tracer
         cls.metrics_dict = metrics_dict
         cls.otlp_endpoint = otlp_endpoint
@@ -125,7 +128,7 @@ def instrument_if_available(instrumentor_name, instrumentor_instance, config,
 
 def init(environment="default", application_name="default", tracer=None, otlp_endpoint=None,
          otlp_headers=None, disable_batch=False, trace_content=True, disabled_instrumentors=None,
-         meter=None, disable_metrics=False):
+         meter=None, disable_metrics=False, pricing_json=None):
     """
     Initializes the openLIT configuration and setups tracing.
 
@@ -143,6 +146,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
         trace_content (bool): Flag to trace content (Optional).
        disabled_instrumentors (List[str]): Optional. List of instrumentor names to disable.
        disable_metrics (bool): Flag to disable metrics (Optional)
+        pricing_json(str): File path or url to the pricing json (Optional)
     """
     disabled_instrumentors = disabled_instrumentors if disabled_instrumentors else []
     # Check for invalid instrumentor names
@@ -162,6 +166,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
         "chroma": "chromadb",
         "pinecone": "pinecone",
         "qdrant": "qdrant_client",
+        "milvus": "pymilvus",
         "transformers": "transformers"
     }
 
@@ -197,7 +202,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
     # Update global configuration with the provided settings.
     config.update_config(environment, application_name, tracer, otlp_endpoint,
                          otlp_headers, disable_batch, trace_content,
-                         metrics_dict, disable_metrics)
+                         metrics_dict, disable_metrics, pricing_json)
 
     # Map instrumentor names to their instances
     instrumentor_instances = {
@@ -215,6 +220,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
         "chroma": ChromaInstrumentor(),
         "pinecone": PineconeInstrumentor(),
         "qdrant": QdrantInstrumentor(),
+        "milvus": MilvusInstrumentor(),
         "transformers": TransformersInstrumentor()
     }
 
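
Taken together, `init` now accepts `pricing_json`, maps the `milvus` instrumentor name to the `pymilvus` module, and registers `MilvusInstrumentor()` alongside the existing vector-DB instrumentors. A hedged sketch of an `init` call exercising the new parameters (the application name and OTLP endpoint are placeholders):

```python
import openlit

openlit.init(
    application_name="vector-search-service",   # placeholder name
    environment="production",
    otlp_endpoint="http://127.0.0.1:4318",      # placeholder collector endpoint
    pricing_json=None,                          # None -> default GitHub pricing.json
    disabled_instrumentors=["transformers"],    # "milvus" could likewise be disabled
)
```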

openlit-1.8.0/src/openlit/instrumentation/milvus/__init__.py (new file)

@@ -0,0 +1,94 @@
+# pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
+"""Initializer of Auto Instrumentation of Milvus Functions"""
+from typing import Collection
+import importlib.metadata
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from wrapt import wrap_function_wrapper
+
+from openlit.instrumentation.milvus.milvus import general_wrap
+
+_instruments = ("pymilvus >= 2.4.3",)
+
+WRAPPED_METHODS = [
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.create_collection",
+        "endpoint": "milvus.create_collection",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.drop_collection",
+        "endpoint": "milvus.drop_collection",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.insert",
+        "endpoint": "milvus.insert",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.upsert",
+        "endpoint": "milvus.upsert",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.search",
+        "endpoint": "milvus.search",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.query",
+        "endpoint": "milvus.query",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.get",
+        "endpoint": "milvus.get",
+        "wrapper": general_wrap,
+    },
+    {
+        "package": "pymilvus",
+        "object": "MilvusClient.delete",
+        "endpoint": "milvus.delete",
+        "wrapper": general_wrap,
+    },
+]
+
+class MilvusInstrumentor(BaseInstrumentor):
+    """An instrumentor for Milvus's client library."""
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return _instruments
+
+    def _instrument(self, **kwargs):
+        application_name = kwargs.get("application_name")
+        environment = kwargs.get("environment")
+        tracer = kwargs.get("tracer")
+        metrics = kwargs.get("metrics_dict")
+        pricing_info = kwargs.get("pricing_info")
+        trace_content = kwargs.get("trace_content")
+        disable_metrics = kwargs.get("disable_metrics")
+        version = importlib.metadata.version("pymilvus")
+
+        for wrapped_method in WRAPPED_METHODS:
+            wrap_package = wrapped_method.get("package")
+            wrap_object = wrapped_method.get("object")
+            gen_ai_endpoint = wrapped_method.get("endpoint")
+            wrapper = wrapped_method.get("wrapper")
+            wrap_function_wrapper(
+                wrap_package,
+                wrap_object,
+                wrapper(gen_ai_endpoint, version, environment, application_name,
+                        tracer, pricing_info, trace_content, metrics, disable_metrics),
+            )
+
+
+    @staticmethod
+    def _uninstrument(self, **kwargs):
+        pass
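
The instrumentor is a thin loop over `wrapt.wrap_function_wrapper`, which patches each `MilvusClient` method in place with a wrapper produced by `general_wrap`. A self-contained sketch of that same mechanism applied to an ordinary stdlib function (the wrapped target and wrapper here are illustrative, not part of openlit):

```python
import json
import time
from wrapt import wrap_function_wrapper

def timing_wrapper(wrapped, instance, args, kwargs):
    """wrapt-style wrapper with the same signature as the one general_wrap returns."""
    start = time.time()
    result = wrapped(*args, **kwargs)
    print(f"{wrapped.__name__} took {time.time() - start:.6f}s")
    return result

# Patch json.dumps in place, analogous to patching "MilvusClient.insert" on pymilvus.
wrap_function_wrapper("json", "dumps", timing_wrapper)

json.dumps({"hello": "milvus"})  # now routed through timing_wrapper
```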

openlit-1.8.0/src/openlit/instrumentation/milvus/milvus.py (new file)

@@ -0,0 +1,179 @@
+# pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument, possibly-used-before-assignment, too-many-branches
+"""
+Module for monitoring Milvus.
+"""
+
+import logging
+from opentelemetry.trace import SpanKind, Status, StatusCode
+from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+from openlit.__helpers import handle_exception
+from openlit.semcov import SemanticConvetion
+
+# Initialize logger for logging potential issues and operations
+logger = logging.getLogger(__name__)
+
+def object_count(obj):
+    """
+    Counts Length of object if it exists, Else returns None
+    """
+    try:
+        cnt = len(obj)
+    # pylint: disable=bare-except
+    except:
+        cnt = 0
+
+    return cnt
+
+def general_wrap(gen_ai_endpoint, version, environment, application_name,
+                 tracer, pricing_info, trace_content, metrics, disable_metrics):
+    """
+    Creates a wrapper around a function call to trace and log its execution metrics.
+
+    This function wraps any given function to measure its execution time,
+    log its operation, and trace its execution using OpenTelemetry.
+
+    Parameters:
+    - gen_ai_endpoint (str): A descriptor or name for the endpoint being traced.
+    - version (str): The version of the Langchain application.
+    - environment (str): The deployment environment (e.g., 'production', 'development').
+    - application_name (str): Name of the Langchain application.
+    - tracer (opentelemetry.trace.Tracer): The tracer object used for OpenTelemetry tracing.
+    - pricing_info (dict): Information about the pricing for internal metrics (currently not used).
+    - trace_content (bool): Flag indicating whether to trace the content of the response.
+
+    Returns:
+    - function: A higher-order function that takes a function 'wrapped' and returns
+      a new function that wraps 'wrapped' with additional tracing and logging.
+    """
+
+    def wrapper(wrapped, instance, args, kwargs):
+        """
+        An inner wrapper function that executes the wrapped function, measures execution
+        time, and records trace data using OpenTelemetry.
+
+        Parameters:
+        - wrapped (Callable): The original function that this wrapper will execute.
+        - instance (object): The instance to which the wrapped function belongs. This
+                             is used for instance methods. For static and classmethods,
+                             this may be None.
+        - args (tuple): Positional arguments passed to the wrapped function.
+        - kwargs (dict): Keyword arguments passed to the wrapped function.
+
+        Returns:
+        - The result of the wrapped function call.
+
+        The wrapper initiates a span with the provided tracer, sets various attributes
+        on the span based on the function's execution and response, and ensures
+        errors are handled and logged appropriately.
+        """
+        with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+            response = wrapped(*args, **kwargs)
+
+            try:
+                span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+                span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+                                   gen_ai_endpoint)
+                span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                                   environment)
+                span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                                   application_name)
+                span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+                                   SemanticConvetion.GEN_AI_TYPE_VECTORDB)
+                span.set_attribute(SemanticConvetion.DB_SYSTEM,
+                                   SemanticConvetion.DB_SYSTEM_MILVUS)
+
+                if gen_ai_endpoint == "milvus.create_collection":
+                    db_operation = SemanticConvetion.DB_OPERATION_CREATE_COLLECTION
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_CREATE_COLLECTION)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_DIMENSION,
+                                       kwargs.get("dimension", ""))
+
+                elif gen_ai_endpoint == "milvus.drop_collection":
+                    db_operation = SemanticConvetion.DB_OPERATION_DELETE_COLLECTION
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_DELETE_COLLECTION)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+
+                elif gen_ai_endpoint == "milvus.insert":
+                    db_operation = SemanticConvetion.DB_OPERATION_ADD
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_ADD)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+                    span.set_attribute(SemanticConvetion.DB_VECTOR_COUNT,
+                                       object_count(kwargs.get("data")))
+                    span.set_attribute(SemanticConvetion.DB_OPERATION_COST,
+                                       response["cost"])
+
+                elif gen_ai_endpoint == "milvus.search":
+                    db_operation = SemanticConvetion.DB_OPERATION_QUERY
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_QUERY)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+                    span.set_attribute(SemanticConvetion.DB_STATEMENT,
+                                       str(kwargs.get("data")))
+
+                elif gen_ai_endpoint in ["milvus.query", "milvus.get"]:
+                    db_operation = SemanticConvetion.DB_OPERATION_QUERY
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_QUERY)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+                    span.set_attribute(SemanticConvetion.DB_STATEMENT,
+                                       str(kwargs.get("output_fields")))
+
+                elif gen_ai_endpoint == "milvus.upsert":
+                    db_operation = SemanticConvetion.DB_OPERATION_ADD
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_UPSERT)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+                    span.set_attribute(SemanticConvetion.DB_VECTOR_COUNT,
+                                       object_count(kwargs.get("data")))
+                    span.set_attribute(SemanticConvetion.DB_OPERATION_COST,
+                                       response["cost"])
+
+                elif gen_ai_endpoint == "milvus.delete":
+                    db_operation = SemanticConvetion.DB_OPERATION_DELETE
+                    span.set_attribute(SemanticConvetion.DB_OPERATION,
+                                       SemanticConvetion.DB_OPERATION_DELETE)
+                    span.set_attribute(SemanticConvetion.DB_COLLECTION_NAME,
+                                       kwargs.get("collection_name", ""))
+                    span.set_attribute(SemanticConvetion.DB_FILTER,
+                                       str(kwargs.get("filter", "")))
+
+                span.set_status(Status(StatusCode.OK))
+
+                if disable_metrics is False:
+                    attributes = {
+                        TELEMETRY_SDK_NAME:
+                            "openlit",
+                        SemanticConvetion.GEN_AI_APPLICATION_NAME:
+                            application_name,
+                        SemanticConvetion.DB_SYSTEM:
+                            SemanticConvetion.DB_SYSTEM_MILVUS,
+                        SemanticConvetion.GEN_AI_ENVIRONMENT:
+                            environment,
+                        SemanticConvetion.GEN_AI_TYPE:
+                            SemanticConvetion.GEN_AI_TYPE_VECTORDB,
+                        SemanticConvetion.DB_OPERATION:
+                            db_operation
+                    }
+
+                    metrics["db_requests"].add(1, attributes)
+
+                return response
+
+            except Exception as e:
+                handle_exception(span, e)
+                logger.error("Error in trace creation: %s", e)
+
+                # Return original response
+                return response
+
+    return wrapper
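
Because the wrapper only needs the `tracer` handed in by `init`, the resulting `milvus.*` spans and their `db.*` attributes can be inspected locally with a console exporter. A hedged sketch, assuming pymilvus >= 2.4.3 is installed and a local Milvus Lite database file is used; the collection name, dimension, and file name are placeholders:

```python
import openlit
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

# Print every finished span (milvus.create_collection, milvus.insert, ...) to stdout.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)

openlit.init(tracer=trace.get_tracer(__name__), disable_metrics=True)

from pymilvus import MilvusClient

client = MilvusClient("milvus_demo.db")  # placeholder local Milvus Lite database
client.create_collection(collection_name="demo", dimension=8)  # span: milvus.create_collection
```

Note that the wrapper reads keyword arguments (`kwargs.get("collection_name")`, `kwargs.get("dimension")`), so attributes such as `db.collection.name` and `db.collection.dimension` are only populated when the client methods are called with keywords, as above.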

{openlit-1.6.0 → openlit-1.8.0}/src/openlit/semcov/__init__.py

@@ -99,6 +99,7 @@ class SemanticConvetion:
     DB_COLLECTION_NAME = "db.collection.name"
     DB_OPERATION = "db.operation"
     DB_OPERATION_STATUS = "db.operation.status"
+    DB_OPERATION_COST = "db.operation.cost"
     DB_OPERATION_CREATE_INDEX = "create_index"
     DB_OPERATION_CREATE_COLLECTION = "create_collection"
     DB_OPERATION_UPDATE_COLLECTION = "update_collection"
@@ -123,7 +124,8 @@ class SemanticConvetion:
     DB_N_RESULTS = "db.n_results"
     DB_DELETE_ALL = "db.delete_all"
     DB_INDEX_NAME = "db.index.name"
-    DB_INDEX_DIMENSION = "db.
+    DB_INDEX_DIMENSION = "db.index.dimension"
+    DB_COLLECTION_DIMENSION = "db.collection.dimension"
     DB_INDEX_METRIC = "db.create_index.metric"
     DB_INDEX_SPEC = "db.create_index.spec"
     DB_NAMESPACE = "db.query.namespace"
@@ -134,3 +136,4 @@ class SemanticConvetion:
     DB_SYSTEM_CHROMA = "chroma"
     DB_SYSTEM_PINECONE = "pinecone"
     DB_SYSTEM_QDRANT = "qdrant"
+    DB_SYSTEM_MILVUS = "milvus"

All remaining files listed above with `+0 -0` are unchanged between openlit-1.6.0 and openlit-1.8.0.