openlit 1.34.30__py3-none-any.whl → 1.34.31__py3-none-any.whl
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- openlit/__helpers.py +235 -86
- openlit/__init__.py +16 -13
- openlit/_instrumentors.py +2 -1
- openlit/evals/all.py +50 -21
- openlit/evals/bias_detection.py +47 -20
- openlit/evals/hallucination.py +53 -22
- openlit/evals/toxicity.py +50 -21
- openlit/evals/utils.py +54 -30
- openlit/guard/all.py +61 -19
- openlit/guard/prompt_injection.py +34 -14
- openlit/guard/restrict_topic.py +46 -15
- openlit/guard/sensitive_topic.py +34 -14
- openlit/guard/utils.py +58 -22
- openlit/instrumentation/ag2/__init__.py +24 -8
- openlit/instrumentation/ag2/ag2.py +34 -13
- openlit/instrumentation/ag2/async_ag2.py +34 -13
- openlit/instrumentation/ag2/utils.py +133 -30
- openlit/instrumentation/ai21/__init__.py +43 -14
- openlit/instrumentation/ai21/ai21.py +47 -21
- openlit/instrumentation/ai21/async_ai21.py +47 -21
- openlit/instrumentation/ai21/utils.py +299 -78
- openlit/instrumentation/anthropic/__init__.py +21 -4
- openlit/instrumentation/anthropic/anthropic.py +28 -17
- openlit/instrumentation/anthropic/async_anthropic.py +28 -17
- openlit/instrumentation/anthropic/utils.py +145 -35
- openlit/instrumentation/assemblyai/__init__.py +11 -2
- openlit/instrumentation/assemblyai/assemblyai.py +15 -4
- openlit/instrumentation/assemblyai/utils.py +120 -25
- openlit/instrumentation/astra/__init__.py +43 -10
- openlit/instrumentation/astra/astra.py +28 -5
- openlit/instrumentation/astra/async_astra.py +28 -5
- openlit/instrumentation/astra/utils.py +151 -55
- openlit/instrumentation/azure_ai_inference/__init__.py +43 -10
- openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +53 -21
- openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +53 -21
- openlit/instrumentation/azure_ai_inference/utils.py +307 -83
- openlit/instrumentation/bedrock/__init__.py +21 -4
- openlit/instrumentation/bedrock/bedrock.py +63 -25
- openlit/instrumentation/bedrock/utils.py +139 -30
- openlit/instrumentation/chroma/__init__.py +89 -16
- openlit/instrumentation/chroma/chroma.py +28 -6
- openlit/instrumentation/chroma/utils.py +167 -51
- openlit/instrumentation/cohere/__init__.py +63 -18
- openlit/instrumentation/cohere/async_cohere.py +63 -24
- openlit/instrumentation/cohere/cohere.py +63 -24
- openlit/instrumentation/cohere/utils.py +286 -73
- openlit/instrumentation/controlflow/__init__.py +35 -9
- openlit/instrumentation/controlflow/controlflow.py +66 -33
- openlit/instrumentation/crawl4ai/__init__.py +25 -10
- openlit/instrumentation/crawl4ai/async_crawl4ai.py +78 -31
- openlit/instrumentation/crawl4ai/crawl4ai.py +78 -31
- openlit/instrumentation/crewai/__init__.py +40 -15
- openlit/instrumentation/crewai/async_crewai.py +32 -7
- openlit/instrumentation/crewai/crewai.py +32 -7
- openlit/instrumentation/crewai/utils.py +159 -56
- openlit/instrumentation/dynamiq/__init__.py +46 -12
- openlit/instrumentation/dynamiq/dynamiq.py +74 -33
- openlit/instrumentation/elevenlabs/__init__.py +23 -4
- openlit/instrumentation/elevenlabs/async_elevenlabs.py +16 -4
- openlit/instrumentation/elevenlabs/elevenlabs.py +16 -4
- openlit/instrumentation/elevenlabs/utils.py +128 -25
- openlit/instrumentation/embedchain/__init__.py +11 -2
- openlit/instrumentation/embedchain/embedchain.py +68 -35
- openlit/instrumentation/firecrawl/__init__.py +24 -7
- openlit/instrumentation/firecrawl/firecrawl.py +46 -20
- openlit/instrumentation/google_ai_studio/__init__.py +45 -10
- openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +67 -44
- openlit/instrumentation/google_ai_studio/google_ai_studio.py +67 -44
- openlit/instrumentation/google_ai_studio/utils.py +180 -67
- openlit/instrumentation/gpt4all/__init__.py +22 -7
- openlit/instrumentation/gpt4all/gpt4all.py +67 -29
- openlit/instrumentation/gpt4all/utils.py +285 -61
- openlit/instrumentation/gpu/__init__.py +128 -47
- openlit/instrumentation/groq/__init__.py +21 -4
- openlit/instrumentation/groq/async_groq.py +33 -21
- openlit/instrumentation/groq/groq.py +33 -21
- openlit/instrumentation/groq/utils.py +192 -55
- openlit/instrumentation/haystack/__init__.py +70 -24
- openlit/instrumentation/haystack/async_haystack.py +28 -6
- openlit/instrumentation/haystack/haystack.py +28 -6
- openlit/instrumentation/haystack/utils.py +196 -74
- openlit/instrumentation/julep/__init__.py +69 -19
- openlit/instrumentation/julep/async_julep.py +53 -27
- openlit/instrumentation/julep/julep.py +53 -28
- openlit/instrumentation/langchain/__init__.py +74 -63
- openlit/instrumentation/langchain/callback_handler.py +1100 -0
- openlit/instrumentation/langchain_community/__init__.py +13 -2
- openlit/instrumentation/langchain_community/async_langchain_community.py +23 -5
- openlit/instrumentation/langchain_community/langchain_community.py +23 -5
- openlit/instrumentation/langchain_community/utils.py +35 -9
- openlit/instrumentation/letta/__init__.py +68 -15
- openlit/instrumentation/letta/letta.py +99 -54
- openlit/instrumentation/litellm/__init__.py +43 -14
- openlit/instrumentation/litellm/async_litellm.py +51 -26
- openlit/instrumentation/litellm/litellm.py +51 -26
- openlit/instrumentation/litellm/utils.py +304 -102
- openlit/instrumentation/llamaindex/__init__.py +267 -90
- openlit/instrumentation/llamaindex/async_llamaindex.py +28 -6
- openlit/instrumentation/llamaindex/llamaindex.py +28 -6
- openlit/instrumentation/llamaindex/utils.py +204 -91
- openlit/instrumentation/mem0/__init__.py +11 -2
- openlit/instrumentation/mem0/mem0.py +50 -29
- openlit/instrumentation/milvus/__init__.py +10 -2
- openlit/instrumentation/milvus/milvus.py +31 -6
- openlit/instrumentation/milvus/utils.py +166 -67
- openlit/instrumentation/mistral/__init__.py +63 -18
- openlit/instrumentation/mistral/async_mistral.py +63 -24
- openlit/instrumentation/mistral/mistral.py +63 -24
- openlit/instrumentation/mistral/utils.py +277 -69
- openlit/instrumentation/multion/__init__.py +69 -19
- openlit/instrumentation/multion/async_multion.py +57 -26
- openlit/instrumentation/multion/multion.py +57 -26
- openlit/instrumentation/ollama/__init__.py +39 -18
- openlit/instrumentation/ollama/async_ollama.py +57 -26
- openlit/instrumentation/ollama/ollama.py +57 -26
- openlit/instrumentation/ollama/utils.py +226 -50
- openlit/instrumentation/openai/__init__.py +156 -32
- openlit/instrumentation/openai/async_openai.py +147 -67
- openlit/instrumentation/openai/openai.py +150 -67
- openlit/instrumentation/openai/utils.py +657 -185
- openlit/instrumentation/openai_agents/__init__.py +5 -1
- openlit/instrumentation/openai_agents/processor.py +110 -90
- openlit/instrumentation/phidata/__init__.py +13 -5
- openlit/instrumentation/phidata/phidata.py +67 -32
- openlit/instrumentation/pinecone/__init__.py +48 -9
- openlit/instrumentation/pinecone/async_pinecone.py +27 -5
- openlit/instrumentation/pinecone/pinecone.py +27 -5
- openlit/instrumentation/pinecone/utils.py +153 -47
- openlit/instrumentation/premai/__init__.py +22 -7
- openlit/instrumentation/premai/premai.py +51 -26
- openlit/instrumentation/premai/utils.py +246 -59
- openlit/instrumentation/pydantic_ai/__init__.py +49 -22
- openlit/instrumentation/pydantic_ai/pydantic_ai.py +69 -16
- openlit/instrumentation/pydantic_ai/utils.py +89 -24
- openlit/instrumentation/qdrant/__init__.py +19 -4
- openlit/instrumentation/qdrant/async_qdrant.py +33 -7
- openlit/instrumentation/qdrant/qdrant.py +33 -7
- openlit/instrumentation/qdrant/utils.py +228 -93
- openlit/instrumentation/reka/__init__.py +23 -10
- openlit/instrumentation/reka/async_reka.py +17 -11
- openlit/instrumentation/reka/reka.py +17 -11
- openlit/instrumentation/reka/utils.py +138 -36
- openlit/instrumentation/together/__init__.py +44 -12
- openlit/instrumentation/together/async_together.py +50 -27
- openlit/instrumentation/together/together.py +50 -27
- openlit/instrumentation/together/utils.py +301 -71
- openlit/instrumentation/transformers/__init__.py +2 -1
- openlit/instrumentation/transformers/transformers.py +13 -3
- openlit/instrumentation/transformers/utils.py +139 -36
- openlit/instrumentation/vertexai/__init__.py +81 -16
- openlit/instrumentation/vertexai/async_vertexai.py +33 -15
- openlit/instrumentation/vertexai/utils.py +123 -27
- openlit/instrumentation/vertexai/vertexai.py +33 -15
- openlit/instrumentation/vllm/__init__.py +12 -5
- openlit/instrumentation/vllm/utils.py +121 -31
- openlit/instrumentation/vllm/vllm.py +16 -10
- openlit/otel/events.py +35 -10
- openlit/otel/metrics.py +32 -24
- openlit/otel/tracing.py +24 -9
- openlit/semcov/__init__.py +72 -6
- {openlit-1.34.30.dist-info → openlit-1.34.31.dist-info}/METADATA +2 -1
- openlit-1.34.31.dist-info/RECORD +166 -0
- openlit/instrumentation/langchain/async_langchain.py +0 -102
- openlit/instrumentation/langchain/langchain.py +0 -102
- openlit/instrumentation/langchain/utils.py +0 -252
- openlit-1.34.30.dist-info/RECORD +0 -168
- {openlit-1.34.30.dist-info → openlit-1.34.31.dist-info}/LICENSE +0 -0
- {openlit-1.34.30.dist-info → openlit-1.34.31.dist-info}/WHEEL +0 -0
openlit/instrumentation/langchain/langchain.py
DELETED
@@ -1,102 +0,0 @@
-"""
-Module for monitoring LangChain API calls.
-"""
-
-import time
-from opentelemetry.trace import SpanKind
-from openlit.__helpers import (
-    handle_exception,
-    set_server_address_and_port
-)
-from openlit.instrumentation.langchain.utils import (
-    get_model_from_instance,
-    process_chat_response,
-    process_hub_response,
-)
-from openlit.semcov import SemanticConvention
-
-def hub(gen_ai_endpoint, version, environment, application_name, tracer,
-        pricing_info, capture_message_content, metrics, disable_metrics):
-    """
-    Generates a telemetry wrapper for LangChain hub operations.
-    """
-
-    def wrapper(wrapped, instance, args, kwargs):
-        """
-        Wraps the LangChain hub operation call.
-        """
-
-        server_address, server_port = set_server_address_and_port(instance, "langchain.com", 443)
-
-        with tracer.start_as_current_span(gen_ai_endpoint, kind=SpanKind.CLIENT) as span:
-            response = wrapped(*args, **kwargs)
-
-            try:
-                response = process_hub_response(
-                    response=response,
-                    gen_ai_endpoint=gen_ai_endpoint,
-                    server_port=server_port,
-                    server_address=server_address,
-                    environment=environment,
-                    application_name=application_name,
-                    span=span,
-                    version=version
-                )
-
-            except Exception as e:
-                handle_exception(span, e)
-
-            return response
-
-    return wrapper
-
-def chat(gen_ai_endpoint, version, environment, application_name,
-         tracer, pricing_info, capture_message_content, metrics, disable_metrics):
-    """
-    Generates a telemetry wrapper for GenAI operations.
-    """
-
-    def wrapper(wrapped, instance, args, kwargs):
-        """
-        Wraps the GenAI operation call.
-        """
-
-        server_address, server_port = set_server_address_and_port(instance, "langchain.com", 443)
-        request_model = get_model_from_instance(instance)
-
-        span_name = f"{SemanticConvention.GEN_AI_OPERATION_TYPE_CHAT} {request_model}"
-
-        with tracer.start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
-            start_time = time.time()
-            response = wrapped(*args, **kwargs)
-            end_time = time.time()
-
-            try:
-                # Add instance to kwargs for processing
-                kwargs["instance"] = instance
-
-                response = process_chat_response(
-                    response=response,
-                    request_model=request_model,
-                    pricing_info=pricing_info,
-                    server_port=server_port,
-                    server_address=server_address,
-                    environment=environment,
-                    application_name=application_name,
-                    metrics=metrics,
-                    start_time=start_time,
-                    end_time=end_time,
-                    span=span,
-                    capture_message_content=capture_message_content,
-                    disable_metrics=disable_metrics,
-                    version=version,
-                    args=args,
-                    **kwargs
-                )
-
-            except Exception as e:
-                handle_exception(span, e)
-
-            return response
-
-    return wrapper
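The removed chat() factory above returns a wrapper with the (wrapped, instance, args, kwargs) signature used by wrapt, which is how wrapper factories like this are typically attached to a target method. For reference, a minimal, hypothetical sketch of such a registration under openlit 1.34.30 follows; the target module and method are illustrative placeholders, not the package's actual registration code in openlit/instrumentation/langchain/__init__.py.

# Illustrative sketch only (assumes openlit==1.34.30 is installed); not openlit's real wiring.
from opentelemetry import trace
from wrapt import wrap_function_wrapper

from openlit.instrumentation.langchain.langchain import chat  # module removed in 1.34.31

tracer = trace.get_tracer(__name__, "1.34.30")

# Attach the telemetry wrapper to a chat entry point (placeholder target).
wrap_function_wrapper(
    "langchain_core.language_models.chat_models",  # placeholder module
    "BaseChatModel.invoke",                        # placeholder method
    chat(
        gen_ai_endpoint="langchain.chat",
        version="1.34.30",
        environment="default",
        application_name="default",
        tracer=tracer,
        pricing_info={},
        capture_message_content=True,
        metrics=None,
        disable_metrics=True,  # metrics is None here, so keep metric recording off
    ),
)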
openlit/instrumentation/langchain/utils.py
DELETED
@@ -1,252 +0,0 @@
-"""
-LangChain OpenTelemetry instrumentation utility functions
-"""
-import time
-
-from opentelemetry.trace import Status, StatusCode
-
-from openlit.__helpers import (
-    get_chat_model_cost,
-    general_tokens,
-    record_completion_metrics,
-    common_span_attributes,
-)
-from openlit.semcov import SemanticConvention
-
-def format_content(messages):
-    """
-    Format the messages into a string for span events.
-    """
-
-    if not messages:
-        return ""
-
-    # Handle string input (simple case)
-    if isinstance(messages, str):
-        return messages
-
-    # Handle list of messages
-    formatted_messages = []
-    for message in messages:
-        # Handle the case where message is a tuple
-        if isinstance(message, tuple) and len(message) == 2:
-            role, content = message
-        # Handle the case where message is a dictionary
-        elif isinstance(message, dict):
-            role = message.get("role", "user")
-            content = message.get("content", "")
-        else:
-            continue
-
-        # Check if the content is a list
-        if isinstance(content, list):
-            content_str = ", ".join(
-                f'{item["type"]}: {item["text"] if "text" in item else item["image_url"]}'
-                if "type" in item else f'text: {item["text"]}'
-                for item in content
-            )
-            formatted_messages.append(f"{role}: {content_str}")
-        else:
-            formatted_messages.append(f"{role}: {content}")
-
-    return "\n".join(formatted_messages)
-
-def get_model_from_instance(instance):
-    """
-    Extract model name from LangChain instance.
-    """
-    if hasattr(instance, "model_id"):
-        return instance.model_id
-    elif hasattr(instance, "model"):
-        return instance.model
-    elif hasattr(instance, "model_name"):
-        return instance.model_name
-    else:
-        return "langchain-model"
-
-def get_attribute_from_instance(instance, attribute_name, default=-1):
-    """
-    Get attribute from instance, checking model_kwargs first.
-    """
-    # Attempt to retrieve model_kwargs from the instance
-    model_kwargs = getattr(instance, "model_kwargs", None)
-
-    # Check for attribute in model_kwargs if it exists
-    if model_kwargs and attribute_name in model_kwargs:
-        value = model_kwargs[attribute_name]
-        return value if value is not None else default
-
-    # Attempt to get the attribute directly from the instance
-    try:
-        value = getattr(instance, attribute_name)
-        # Return default if value is None
-        return value if value is not None else default
-    except AttributeError:
-        # Special handling for "model" attribute to consider "model_id"
-        if attribute_name == "model":
-            return getattr(instance, "model_id", "langchain-model")
-
-        # Default if the attribute isnt found in model_kwargs or the instance
-        return default
-
-def common_chat_logic(scope, pricing_info, environment, application_name, metrics,
-                      capture_message_content, disable_metrics, version, is_stream):
-    """
-    Process chat request and generate Telemetry
-    """
-
-    scope._end_time = time.time()
-    scope._tbt = 0  # LangChain doesnt support streaming yet
-    scope._ttft = scope._end_time - scope._start_time
-
-    # Extract prompt - check args[0] first (positional), then kwargs (keyword arguments)
-    messages = None
-    if scope._args and len(scope._args) > 0:
-        messages = scope._args[0]  # llm.invoke([("system", "..."), ("human", "...")])
-    else:
-        messages = scope._kwargs.get("messages", "") or scope._kwargs.get("input", "")  # llm.invoke(messages=[...])
-
-    formatted_messages = format_content(messages)
-    request_model = scope._request_model
-
-    # Use actual token counts from response if available, otherwise calculate them using general_tokens
-    if (scope._input_tokens in [None, 0] or scope._output_tokens in [None, 0]):
-        scope._input_tokens = general_tokens(str(formatted_messages))
-        scope._output_tokens = general_tokens(str(scope._llmresponse))
-
-    cost = get_chat_model_cost(request_model, pricing_info, scope._input_tokens, scope._output_tokens)
-
-    # Common Span Attributes
-    common_span_attributes(scope,
-        SemanticConvention.GEN_AI_OPERATION_TYPE_CHAT, SemanticConvention.GEN_AI_SYSTEM_LANGCHAIN,
-        scope._server_address, scope._server_port, request_model, scope._response_model,
-        environment, application_name, is_stream, scope._tbt, scope._ttft, version)
-
-    # Span Attributes for Request parameters
-    instance = scope._kwargs.get("instance")
-    if instance:
-        scope._span.set_attribute(SemanticConvention.GEN_AI_REQUEST_TEMPERATURE,
-            get_attribute_from_instance(instance, "temperature", 1.0))
-        scope._span.set_attribute(SemanticConvention.GEN_AI_REQUEST_TOP_K,
-            get_attribute_from_instance(instance, "top_k", 1.0))
-        scope._span.set_attribute(SemanticConvention.GEN_AI_REQUEST_TOP_P,
-            get_attribute_from_instance(instance, "top_p", 1.0))
-
-    # Span Attributes for Response parameters
-    scope._span.set_attribute(SemanticConvention.GEN_AI_RESPONSE_ID, scope._response_id)
-    scope._span.set_attribute(SemanticConvention.GEN_AI_RESPONSE_FINISH_REASON, [scope._finish_reason])
-    scope._span.set_attribute(SemanticConvention.GEN_AI_OUTPUT_TYPE, "text" if isinstance(scope._llmresponse, str) else "json")
-
-    # Span Attributes for Cost and Tokens
-    scope._span.set_attribute(SemanticConvention.GEN_AI_USAGE_INPUT_TOKENS, scope._input_tokens)
-    scope._span.set_attribute(SemanticConvention.GEN_AI_USAGE_OUTPUT_TOKENS, scope._output_tokens)
-    scope._span.set_attribute(SemanticConvention.GEN_AI_CLIENT_TOKEN_USAGE, scope._input_tokens + scope._output_tokens)
-    scope._span.set_attribute(SemanticConvention.GEN_AI_USAGE_COST, cost)
-
-    # Span Attributes for Content
-    if capture_message_content:
-        scope._span.set_attribute(SemanticConvention.GEN_AI_CONTENT_PROMPT, formatted_messages)
-        scope._span.set_attribute(SemanticConvention.GEN_AI_CONTENT_COMPLETION, scope._llmresponse)
-
-        # To be removed once the change to span_attributes (from span events) is complete
-        scope._span.add_event(
-            name=SemanticConvention.GEN_AI_CONTENT_PROMPT_EVENT,
-            attributes={
-                SemanticConvention.GEN_AI_CONTENT_PROMPT: formatted_messages,
-            },
-        )
-        scope._span.add_event(
-            name=SemanticConvention.GEN_AI_CONTENT_COMPLETION_EVENT,
-            attributes={
-                SemanticConvention.GEN_AI_CONTENT_COMPLETION: scope._llmresponse,
-            },
-        )
-
-    scope._span.set_status(Status(StatusCode.OK))
-
-    # Record metrics
-    if not disable_metrics:
-        record_completion_metrics(metrics, SemanticConvention.GEN_AI_OPERATION_TYPE_CHAT, SemanticConvention.GEN_AI_SYSTEM_LANGCHAIN,
-            scope._server_address, scope._server_port, request_model, scope._response_model, environment,
-            application_name, scope._start_time, scope._end_time, scope._input_tokens, scope._output_tokens,
-            cost, scope._tbt, scope._ttft)
-
-def process_chat_response(response, request_model, pricing_info, server_port, server_address,
-                          environment, application_name, metrics, start_time, end_time,
-                          span, capture_message_content=False, disable_metrics=False,
-                          version="1.0.0", args=None, **kwargs):
-    """
-    Process chat response and generate telemetry.
-    """
-
-    # Create scope object
-    scope = type("GenericScope", (), {})()
-
-    scope._start_time = start_time
-    scope._end_time = end_time
-    scope._span = span
-    scope._server_address = server_address
-    scope._server_port = server_port
-    scope._request_model = request_model
-    scope._kwargs = kwargs
-    scope._args = args or ()
-
-    # Extract response content and metadata - only extract what comes from the response
-    try:
-        scope._llmresponse = response.content
-    except AttributeError:
-        scope._llmresponse = str(response)
-
-    # Extract token information from usage_metadata if available
-    usage_metadata = getattr(response, "usage_metadata", None)
-    if usage_metadata:
-        scope._input_tokens = usage_metadata.get("input_tokens", 0)
-        scope._output_tokens = usage_metadata.get("output_tokens", 0)
-        scope._total_tokens = usage_metadata.get("total_tokens", 0)
-    else:
-        # Will be calculated in common_chat_logic if not available
-        scope._input_tokens = None
-        scope._output_tokens = None
-        scope._total_tokens = None
-
-    # Extract response metadata
-    response_metadata = getattr(response, "response_metadata", {})
-    scope._response_model = response_metadata.get("model_name", request_model)
-    scope._finish_reason = response_metadata.get("finish_reason", "stop")
-
-    # Extract response ID
-    scope._response_id = getattr(response, "id", "")
-
-    common_chat_logic(scope, pricing_info, environment, application_name, metrics,
-                      capture_message_content, disable_metrics, version, is_stream=False)
-
-    return response
-
-
-
-def process_hub_response(response, gen_ai_endpoint, server_port, server_address,
-                         environment, application_name, span, version="1.0.0"):
-    """
-    Process LangChain hub operations and generate telemetry.
-    """
-
-    # Set span attributes for hub operations
-    span.set_attribute(SemanticConvention.GEN_AI_SYSTEM, SemanticConvention.GEN_AI_SYSTEM_LANGCHAIN)
-    span.set_attribute(SemanticConvention.GEN_AI_ENDPOINT, gen_ai_endpoint)
-    span.set_attribute(SemanticConvention.GEN_AI_OPERATION, SemanticConvention.GEN_AI_OPERATION_TYPE_FRAMEWORK)
-    span.set_attribute(SemanticConvention.GEN_AI_SDK_VERSION, version)
-    span.set_attribute(SemanticConvention.GEN_AI_ENVIRONMENT, environment)
-    span.set_attribute(SemanticConvention.GEN_AI_APPLICATION_NAME, application_name)
-
-    # Try to extract hub metadata
-    try:
-        span.set_attribute(SemanticConvention.GEN_AI_HUB_OWNER,
-            response.metadata.get("lc_hub_owner", "unknown"))
-        span.set_attribute(SemanticConvention.GEN_AI_HUB_REPO,
-            response.metadata.get("lc_hub_repo", "unknown"))
-    except (AttributeError, KeyError):
-        pass
-
-    span.set_status(Status(StatusCode.OK))
-
-    return response
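For reference, the behavior of the removed format_content() helper above, traced directly from the deleted code: it accepts a plain string, a list of (role, content) tuples, or a list of message dicts, and flattens them into one prompt string for span attributes. The inputs below are made-up examples and assume the 1.34.30 wheel is installed.

# Illustrative trace of the removed helper (openlit==1.34.30).
from openlit.instrumentation.langchain.utils import format_content

print(format_content("What is LLM observability?"))
# -> What is LLM observability?

print(format_content([("system", "You are a helpful assistant."), ("human", "Summarize this page.")]))
# -> system: You are a helpful assistant.
# -> human: Summarize this page.

print(format_content([{"role": "user", "content": [{"type": "text", "text": "Describe the image."}]}]))
# -> user: text: Describe the image.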
openlit-1.34.30.dist-info/RECORD
DELETED
@@ -1,168 +0,0 @@
-openlit/__helpers.py,sha256=4A2sUiPTF4TK4dJ2pZgmSdFlU6Ket1LDUB9Jj_1Qtkg,20358
-openlit/__init__.py,sha256=gKMWmsqzb7xIJgbPvGO6yNcuzpf5lg1cShKrPSrE1MM,18560
-openlit/_instrumentors.py,sha256=k7IZCCl2IeDUcyeVHxaH1krjJPC90mlveH51u6ZnUSs,5744
-openlit/evals/__init__.py,sha256=nJe99nuLo1b5rf7pt9U9BCdSDedzbVi2Fj96cgl7msM,380
-openlit/evals/all.py,sha256=oWrue3PotE-rB5WePG3MRYSA-ro6WivkclSHjYlAqGs,7154
-openlit/evals/bias_detection.py,sha256=mCdsfK7x1vX7S3psC3g641IMlZ-7df3h-V6eiICj5N8,8154
-openlit/evals/hallucination.py,sha256=Yn5OfWVJKynAiImV_aAqCvc0zqYjdJ3XUppCnMTy1pg,7507
-openlit/evals/toxicity.py,sha256=Ii_kX2GToO9fDstDBuK4iN0tEQUkMoPWUBDMFFfeMC4,7000
-openlit/evals/utils.py,sha256=dxi4tv1bMBPp8u78WkyVyB8--3XnplMe_VfV3Eypww8,8479
-openlit/guard/__init__.py,sha256=B-D7iPadGFn5i7nEsQfVbS6feL1ViRht3zWTQ45Jgkg,417
-openlit/guard/all.py,sha256=s2TajvvNnx28U6tM7sAKfU9pHK09VwGsrOQ9rLCnhO0,9743
-openlit/guard/prompt_injection.py,sha256=3e4DKxB7QDzM-xPCpwEuureiH_2s_OTJ9BSckknPpzY,5784
-openlit/guard/restrict_topic.py,sha256=KTuWa7XeMsV4oXxOrD1CYZV0wXWxTfA0H3p_6q_IOsk,6444
-openlit/guard/sensitive_topic.py,sha256=RgVw_laFERv0nNdzBsAd2_3yLomMOK-gVq-P7oj1bTk,5552
-openlit/guard/utils.py,sha256=6hE3rCRjFXYjKRQYUo8YsqUSlvod48nOWp8MwoQEYdw,7670
-openlit/instrumentation/ag2/__init__.py,sha256=vHKx7ybxwtNMGoIyU3lPp8t33m3lWyA_B-1HtJoW0v4,1880
-openlit/instrumentation/ag2/ag2.py,sha256=ebsBWrXf7apjLumTB4hWw19-q9V0zacyDcJ-7oZoxew,3955
-openlit/instrumentation/ag2/async_ag2.py,sha256=5OnM7tos-T5O6mmxhOVI1EO3J47wodC_HSJPWpvmauM,4031
-openlit/instrumentation/ag2/utils.py,sha256=HwOqUQE4HqHLDLGf8nIPc_aiOVoV7iz30piw31F9pys,7073
-openlit/instrumentation/ai21/__init__.py,sha256=tKX643fwxPWPJq1EXEZd0Xpd6B0jl_ViPFmJ87f5B08,2539
-openlit/instrumentation/ai21/ai21.py,sha256=zyQMfCLcOFG1tQWrZmGeMaVAmj8MtCUeXQtPHmlUAO0,6533
-openlit/instrumentation/ai21/async_ai21.py,sha256=q1Dhxru4tUJu0U1Px3PptNqrSGW0-VfRGcqkLKFR8vQ,6659
-openlit/instrumentation/ai21/utils.py,sha256=5zf69uw_TT8u-q-6R6rBeGm1bX0WpsbrAq-MTTZJ9Bk,14309
-openlit/instrumentation/anthropic/__init__.py,sha256=mpb15zoUk1jymxUOyyk4IZSKlhsra-3MjO7RLbm1o-I,1753
-openlit/instrumentation/anthropic/anthropic.py,sha256=e5by3_c8zr_6XsQX4R96Wzj840dDiQZe25BgM5melU0,4892
-openlit/instrumentation/anthropic/async_anthropic.py,sha256=fZLqZO5gOMBn_KtTl-5MEDfoOFyQctpNxo6saVaCw8I,4994
-openlit/instrumentation/anthropic/utils.py,sha256=W9paSh2npsCwsHyf-RyHeg0eRDcE509_f-pqb2IeO9c,9761
-openlit/instrumentation/assemblyai/__init__.py,sha256=-pW7c5Vxa493yETQABbebx4be_sTx5VwkvQrIHbhRbI,1404
-openlit/instrumentation/assemblyai/assemblyai.py,sha256=SJZ-O6k8adlRWJ2gMIP62vXobHJ3VI87PQOCFw9Ilng,2071
-openlit/instrumentation/assemblyai/utils.py,sha256=driBfwWBveWTqHyPRtl1R8oEG6m07-GXycyCnDfZ9PM,6089
-openlit/instrumentation/astra/__init__.py,sha256=NBqmJfzJINFp3QT5sMVgvQYIzWB8FPb1Exxa-EATPss,4035
-openlit/instrumentation/astra/astra.py,sha256=JsuGgbVvFWn4iSocoXiA1L1X9q1pYezvp1LHk5dhPoY,2026
-openlit/instrumentation/astra/async_astra.py,sha256=0LSj3Zd9L5ZFTk9OWGx1eMgfsmBopPpHKDWIYeah7D4,2056
-openlit/instrumentation/astra/utils.py,sha256=rV3vcqPUcWj5rYjmga4ObWrnDDm-9V6kGqBRtVwqD24,12037
-openlit/instrumentation/azure_ai_inference/__init__.py,sha256=_GuYy4ypF6_HICpAC8dNQ5-FBjkcNzPTPF4q3fTM10Q,2512
-openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py,sha256=SFrniRWPqVBxFJVOpC8w1qNGSYZhCFXeKHVHu5pEdZI,5906
-openlit/instrumentation/azure_ai_inference/azure_ai_inference.py,sha256=hRFuuvaXflctNNbk7N2GOfKaC_eCHbrBWf9_1sZcaGY,5808
-openlit/instrumentation/azure_ai_inference/utils.py,sha256=JqKZgb6VppDbAQ2RdH_dQ2oWVnaqGIA1PCmMl1yrMtA,15491
-openlit/instrumentation/bedrock/__init__.py,sha256=D4Wyjb9kJASUQMy5hI41Mvp_csP4k4F6tASdQjt8IZ0,1681
-openlit/instrumentation/bedrock/bedrock.py,sha256=_2oTa9cf-OxHTFCmNUWT__vHnv7-wUcxm9-6nC1K3jg,8036
-openlit/instrumentation/bedrock/utils.py,sha256=7qn8pI-BQcW8ZLxYJb_W3Yzncqbz4QBCR9f2u9N7C4Y,9345
-openlit/instrumentation/chroma/__init__.py,sha256=CNlUbryf2gJ0-Ayf8d65Ec5GOkWrFl8ELqSUybuqQV4,3256
-openlit/instrumentation/chroma/chroma.py,sha256=pdKq-EkcNI4FwyGYPSxDyseJedmtgyukmBU1m-A2qAc,1995
-openlit/instrumentation/chroma/utils.py,sha256=AMRAKY8X88Er459eXcG0Il3VR-NoMPMnzDEI7USloeE,12312
-openlit/instrumentation/cohere/__init__.py,sha256=FIJ_QokUZcN9UsVPWckRfHiLVJ5zj3jtcOjQmjQA5f0,2978
-openlit/instrumentation/cohere/async_cohere.py,sha256=wgctJa-BcKEkVguLorR7mYRShv7GiioF1_zxevvzXTw,6990
-openlit/instrumentation/cohere/cohere.py,sha256=7YAj6kwJ1MDLFY3WtbPRehZFIseDVIbvPQJZxG8Qc6A,6814
-openlit/instrumentation/cohere/utils.py,sha256=5l3Av-t0GzawPd0j5L9jcpdJDGOvLq2YCMpo3usS4OY,14533
-openlit/instrumentation/controlflow/__init__.py,sha256=Y5xSj6bwkGufG989YtRGujzj8qrV4T5kXr4hzGb2nGk,2168
-openlit/instrumentation/controlflow/controlflow.py,sha256=hi4DtshGvnPjhElwX-ytkRn86r0Q6hLi9FhPCYFAG50,5561
-openlit/instrumentation/crawl4ai/__init__.py,sha256=TTFFeZkTgPZJAD1Tlr2LK-CTH2A7NkZK-5SHKFfFOtU,1947
-openlit/instrumentation/crawl4ai/async_crawl4ai.py,sha256=bqtopPKOMCfBx7in08QnpjYnPO09YV_uySR19YTuV98,4893
-openlit/instrumentation/crawl4ai/crawl4ai.py,sha256=kX5B5ItHBw-_qa-SjSmIjFQg0ZwckB1tMkoemCvg2rc,4875
-openlit/instrumentation/crewai/__init__.py,sha256=dqoLjfBckVufp7uC4-Znq0R_YJ5FgSRfvmUuofR39ZE,4751
-openlit/instrumentation/crewai/async_crewai.py,sha256=iXEagpSfpdYbbQmANd6tozjksbu7QLGBOHj3a_wuq0Q,3327
-openlit/instrumentation/crewai/crewai.py,sha256=iScniCy3WDOS-GThaISiUPjOz-CZVL1t1tfOYda_fqk,3640
-openlit/instrumentation/crewai/utils.py,sha256=mvnQc41eEMzKcdR3-1k01SJWf_NQTOe8BHTVVju0yzs,21043
-openlit/instrumentation/dynamiq/__init__.py,sha256=LuIYSQpQH5Pk5Ngl_3Jy3bImGjZgh61La6sbVJfC1Io,2391
-openlit/instrumentation/dynamiq/dynamiq.py,sha256=0x-76VL5KG_HytmzAOi4ERPN0Wm5KLyMxHZmFbaWxxg,5309
-openlit/instrumentation/elevenlabs/__init__.py,sha256=YDOyrxdY9VACuHY5iZ3v3FaIPcNM7lAmUInJ6H-Cw-g,1897
-openlit/instrumentation/elevenlabs/async_elevenlabs.py,sha256=IjcFay1Cgdrq4IGsE1ZRQemSDBsqAqVYRkzUm8LAaBs,1925
-openlit/instrumentation/elevenlabs/elevenlabs.py,sha256=Y4zik8Ds4rv21258F-VEn8I4v1S39Vb__w8MI0lAzGw,1913
-openlit/instrumentation/elevenlabs/utils.py,sha256=GpphFe5F9h4s8azj155IgywbenNoRYN2DtNyDT7HWKA,5876
-openlit/instrumentation/embedchain/__init__.py,sha256=x2_qvJTwWog_mH6IY987Bp9mWxHtasqX2nZ3rwA7mb4,1959
-openlit/instrumentation/embedchain/embedchain.py,sha256=GwY_mh06odzM453L8Qxxf8A6NqZEc5FVq4F9ySN_4w0,7905
-openlit/instrumentation/firecrawl/__init__.py,sha256=kyVsAiDBC2djifqT2w1cPRAotiEyEabNvnBeSQxi9N8,1876
-openlit/instrumentation/firecrawl/firecrawl.py,sha256=4X38UrLYeGm3uez-edYA6qEc0nKC3p77yfKgKBBud0A,3826
-openlit/instrumentation/google_ai_studio/__init__.py,sha256=VLNOlaTFzjOpuUzloynvADewiTmaEu1wx8FerEbmsvg,2510
-openlit/instrumentation/google_ai_studio/async_google_ai_studio.py,sha256=UL5AdTwkzdTKUomTfETMgYjUl00qL7BB8U0izuXfKFo,5527
-openlit/instrumentation/google_ai_studio/google_ai_studio.py,sha256=nanOoXz-1uJtdh39aD438_yMk0no3AM7VVNKzDganHo,5429
-openlit/instrumentation/google_ai_studio/utils.py,sha256=-X5sHk216ajJrl4cP35f5vT8YAZaIE4yLKI7nWEKHkQ,11140
-openlit/instrumentation/gpt4all/__init__.py,sha256=kXciJbQMZYnTeAYLCjriVYXV7XzUUQrwEZPmyv1WXxI,1627
-openlit/instrumentation/gpt4all/gpt4all.py,sha256=6VkJbaPIDv5sbFXFiadH4IB0KljljnOZ1HaGAPuyp_E,6704
-openlit/instrumentation/gpt4all/utils.py,sha256=clyoIy1_ib-1_keQFMvyzTOcbWHeWPRpDhV-w2CtIAU,12470
-openlit/instrumentation/gpu/__init__.py,sha256=QQCFVEbRfdeTjmdFe-UeEiy19vEEWSIBpj2B1wYGhUs,11036
-openlit/instrumentation/groq/__init__.py,sha256=WImIz76RvG4w6r1s7I-n-2FiZSTWclV-jVctETNMxJU,1731
-openlit/instrumentation/groq/async_groq.py,sha256=hp3AN8B98cUbX4C0GksTbNb6kpg31FJUlhW32Wo8CnI,5113
-openlit/instrumentation/groq/groq.py,sha256=XfJakQCfEszvVJxzpemYBIVE73b5WVj8bAlDaeVKBMU,4994
-openlit/instrumentation/groq/utils.py,sha256=0ttCMcKmyDrSnmQtM20U5Yo6OllUPW5iBNBE5qW0jGk,9072
-openlit/instrumentation/haystack/__init__.py,sha256=69pn20PbbrWLRISQmI5VKEYLcOcTe0PPHcH4sbaEbX4,3353
-openlit/instrumentation/haystack/async_haystack.py,sha256=l4LeWL91ZSx_4azpRtdjRtYGL_A_mYc7ejrzqiP5Ysk,1884
-openlit/instrumentation/haystack/haystack.py,sha256=qsJ189pfR1F7bcCe7vYTFGA3tCgu2s5ar0pMHOmX_Ks,1847
-openlit/instrumentation/haystack/utils.py,sha256=A34ZQ97yKJcLi7NbCR_Uqg95BgyvcOvrEj3MSkw37xo,17083
-openlit/instrumentation/julep/__init__.py,sha256=g-hwXjvXAb5IDs5DR_P8rKsnD4beB9tupAzuuviQT3k,3216
-openlit/instrumentation/julep/async_julep.py,sha256=637HVs-_IYRA4DbBq4RVlLzd5iE9rwYvDZZ7ZFm8BbM,5315
-openlit/instrumentation/julep/julep.py,sha256=4QB2cA-bhk4qUnICzjwP9fF_ZHhXtmVfbxJNi81nuKM,5318
-openlit/instrumentation/langchain/__init__.py,sha256=idjeMAL8tCf1KimrS82D4RERbicSxBj82e8WNuaZWs8,2996
-openlit/instrumentation/langchain/async_langchain.py,sha256=5RtaBLifJoDYBPL3d53dT2GDmDzOh5oqyZeJIXAmWxg,3426
-openlit/instrumentation/langchain/langchain.py,sha256=6jO5QAZz_jYyauEyQ76nbTpiNrTLPwLNPKzXmlBn75Y,3336
-openlit/instrumentation/langchain/utils.py,sha256=ermEFuOY9Djh4Np4EHeh7XRzZc-B24A_CPLqkJhvzpY,10470
-openlit/instrumentation/langchain_community/__init__.py,sha256=DGNxMj6RAMQtTFD0plU826D3G-KupROwexN4GjmAFmk,2717
-openlit/instrumentation/langchain_community/async_langchain_community.py,sha256=BX6ErjSX9-RXBxB5cFwDrhVKpb3OGzwpzzw5VPMpp80,1590
-openlit/instrumentation/langchain_community/langchain_community.py,sha256=J-sN5eGC7r-OkPAU-lnbdG7-b_jtYs0esmFy51xdFIk,1560
-openlit/instrumentation/langchain_community/utils.py,sha256=qRN8GvQoFHhMdtHOOGoZXGOC6Xk_Y_4z0bxQU6mGPSA,3326
-openlit/instrumentation/letta/__init__.py,sha256=K8PtRKxuueyqEYE3LzxWJ74IieNKSI6dmk9sNRd8Mt0,3031
-openlit/instrumentation/letta/letta.py,sha256=SCIpJ4tdB1l1BmeQx4raaTS4MQO5X15pLvS4PepEKBE,8481
-openlit/instrumentation/litellm/__init__.py,sha256=D47yfDLLEKpkaRAy7_Yif70kj88AGqLQYZAABpTN4sE,2284
-openlit/instrumentation/litellm/async_litellm.py,sha256=GdMXyCLLuC9-7nXTsKTgvXOOQnw0oTbUwL_DYp_pNoE,6780
-openlit/instrumentation/litellm/litellm.py,sha256=xLna3I_jcywTtIs1tBjHAQKyKjNM07T8GHX9pIqZcQ0,6664
-openlit/instrumentation/litellm/utils.py,sha256=baaNUlThexT1HIQiD6Sa4-hakXXRmqeTqF7AqDql2bo,13595
-openlit/instrumentation/llamaindex/__init__.py,sha256=B_76JO93uZiWhPGqvOVpZltOQbPw9JxuQw-HBBJZqQA,10121
-openlit/instrumentation/llamaindex/async_llamaindex.py,sha256=DdQmdhXtKgPpqsqgseB-MLZSm-1Sc51Q2uEeavhaIMw,1879
-openlit/instrumentation/llamaindex/llamaindex.py,sha256=2mmhYeZXSfXJLzu-yPNTTpoV-E2UAYdYroWIVDFgw0M,1932
-openlit/instrumentation/llamaindex/utils.py,sha256=rGFzN5J1-F9MAgr59VFC8TvOG8z0xFceXB6hrntqFH4,19043
-openlit/instrumentation/mem0/__init__.py,sha256=IadP3bKgz2HCbnrh9S7AW24uDauGkzsIWeOQaGkOCc4,2447
-openlit/instrumentation/mem0/mem0.py,sha256=mEK72AlUmydI1DZu8L9dnTx2B95MrIEzMfg6ExG93nQ,5336
-openlit/instrumentation/milvus/__init__.py,sha256=aFRRkJtUEJcoIQjMWPdBiAaODFZrGuvUPg-9lCGkg80,1839
-openlit/instrumentation/milvus/milvus.py,sha256=QCtVFGvlQkNPP2apsWOYMTsCPFugts6DMnF0Sm77AHA,1963
-openlit/instrumentation/milvus/utils.py,sha256=UDk6UmJ0ovCgcs2ynOXWmJEbIqnbipO0k0BsAmVKRvg,11609
-openlit/instrumentation/mistral/__init__.py,sha256=D4CLrx9KSSxAPA1m00743Og0Tl8BS47nsgp4qG5-qh8,2977
-openlit/instrumentation/mistral/async_mistral.py,sha256=LWq8tYahbA7NOPDdk6DWJAKuxR4GHOC49w6L1QuNtBw,6946
-openlit/instrumentation/mistral/mistral.py,sha256=lyXyPZuxVACcZoOz85G4FFH4KP77uGf3aOiFDdHLQFI,6771
-openlit/instrumentation/mistral/utils.py,sha256=B_sdUXXYPq6w3-qPX6lei0eKU6OxcgXOKG40TOZWDUQ,13200
-openlit/instrumentation/multion/__init__.py,sha256=Wr3lcDyG_YbOLkCUzBFhraAedF6E113tce8eSWlcz10,3149
-openlit/instrumentation/multion/async_multion.py,sha256=XutZnayCJOZ_NA9bvE1NUoej41KOGR7FRn2tpoGKMEU,6092
-openlit/instrumentation/multion/multion.py,sha256=-WqRAcu5qiEMY9XDmlJTQHuQiWfdwms9JDn127QCNb8,6074
-openlit/instrumentation/ollama/__init__.py,sha256=WxjqjuR8ovMU5dR08OELNqClbuM7ns4hDRiwWg9NXJk,3587
-openlit/instrumentation/ollama/async_ollama.py,sha256=ORXwem8lgSrhOcci55NkChIK9SNc3IYIpLjF_ogsGA8,6666
-openlit/instrumentation/ollama/ollama.py,sha256=8mvrWfU1c5h1L7lxWo47YBJ7g2u7QZmSZuuP0URtTDo,6538
-openlit/instrumentation/ollama/utils.py,sha256=TIE3_ur2U-iyCclna7TzwjDIFC9PZjRnZqNDV6NfG-0,11958
-openlit/instrumentation/openai/__init__.py,sha256=4RWRhrRa589jiwvFf8_fLBW6UB5Btrd17mcDKv5VhJk,5546
-openlit/instrumentation/openai/async_openai.py,sha256=QvEEKZnZYl9Vf-wsX1voTMMZed1eNhRI9aUT8CtFJi0,18003
-openlit/instrumentation/openai/openai.py,sha256=34_FqOwSroNOm_mmLzZb8Y7xtr5StwnUyRQmHP6HHJc,17698
-openlit/instrumentation/openai/utils.py,sha256=-qqRbgdJOj_XJ_5-jyjDbai_KGBCUWNrQk5vdS7JyJs,36401
-openlit/instrumentation/openai_agents/__init__.py,sha256=AVQ-dMKCSSg3JsGCVBooQrVIS1F64AD-rlLQKgloYEw,2322
-openlit/instrumentation/openai_agents/processor.py,sha256=DbfbEpAspARPMZb30R0A9dl-Fxv-r13Czr2zRgWwFPo,16769
-openlit/instrumentation/phidata/__init__.py,sha256=tqls5-UI6FzbjxYgq_qqAfALhWJm8dHn2NtgqiQA4f8,1557
-openlit/instrumentation/phidata/phidata.py,sha256=ohrxs6i0Oik75P2BrjNGbK71tdZg94ZMmaXixrXwV5M,4834
-openlit/instrumentation/pinecone/__init__.py,sha256=-3wD35oCnwjwBQV3-gZs2XgpZ2wT9jmiMGjalpF9BhI,3683
-openlit/instrumentation/pinecone/async_pinecone.py,sha256=AXXsRvsOgqFlYRXfE971RUsiClmPnMvMiPjKgE7y-Nk,2035
-openlit/instrumentation/pinecone/pinecone.py,sha256=RjVkYvynaDcDIbmkyZnKvmeTK57Q48uOknDg7UF8MdM,1993
-openlit/instrumentation/pinecone/utils.py,sha256=Gke681uUWPhXQ38OeWBgScp08EPcwGkfzK2zQ2aGQKc,10392
-openlit/instrumentation/premai/__init__.py,sha256=3YlqyV-eNA_4aVUHDVUQUvGJRW8iVVcRtREw91yhbyw,1728
-openlit/instrumentation/premai/premai.py,sha256=rWRqfoIZUbTz-M7zgC2Z92gTVv9fCj1Z4iJcsG86YeI,6438
-openlit/instrumentation/premai/utils.py,sha256=K7EKGRDDh1X3OznG4z8H506zzFOHN6MH3oqtxM5eUyM,11409
-openlit/instrumentation/pydantic_ai/__init__.py,sha256=mq52QanFI4xDx6JK-qW5yzhFPXwznJqIYsuxRoBA2Xg,2023
-openlit/instrumentation/pydantic_ai/pydantic_ai.py,sha256=2F2hrowGqcPjTDLG9IeLY8OO-lXZKhLSU93XtZ3tt5A,1868
-openlit/instrumentation/pydantic_ai/utils.py,sha256=b0TqhSDnRqkPdM_qsOgMuXT3lwTvHzMYpaBv2qibiVo,4307
-openlit/instrumentation/qdrant/__init__.py,sha256=-2qoUBZXaZqVpOjM_XDp6_Nf4tCqErrGcNtVD6DGgb0,3010
-openlit/instrumentation/qdrant/async_qdrant.py,sha256=8e6CQGMMnvsTtfYTlxJBRwfHnqHcg-eKl6MIHGuZxL4,2259
-openlit/instrumentation/qdrant/qdrant.py,sha256=vu2NCzlsl3R4NB__JXexHKSDnUNbLQAhxIHt3EG6Zbs,2223
-openlit/instrumentation/qdrant/utils.py,sha256=XfjpesBq1S0YtwMLPJmYacrBEP9OeM2yBQYMSUieVPk,15223
-openlit/instrumentation/reka/__init__.py,sha256=wI5KUYyTAD8ni4E98uziy9WPqoQqlzybDXanFOqDan0,1720
-openlit/instrumentation/reka/async_reka.py,sha256=CZk5rr7njThDkmrauRAJmNtMBgsLarTbQ54raPQb92A,1909
-openlit/instrumentation/reka/reka.py,sha256=wou7vVdN_1Y5UZd4tpkLpTPAtgmAl6gmh_onLn4k4GE,1908
-openlit/instrumentation/reka/utils.py,sha256=qt1ZIsWkuFGX6iPMiUJ993dh1njvc81QJECD3BnSOpE,6632
-openlit/instrumentation/together/__init__.py,sha256=2o8LML05Ji_hs6xuyP3iz7vJcFIgKGaLaghSDPEcHGk,2468
-openlit/instrumentation/together/async_together.py,sha256=0-h5fKw6rIwN_fvWVpGuvVqizIuM9xFCzz8Z4oGgOj0,6822
-openlit/instrumentation/together/together.py,sha256=nY6mzHmHgoMbbnB_9eL0EBQjP0ltJVdkQj4pbamHAj0,6723
-openlit/instrumentation/together/utils.py,sha256=n7r_pM_sqFnJEAkL7OhPydr0Uct0A74vXdcYELdbeW0,14368
-openlit/instrumentation/transformers/__init__.py,sha256=hXq0WUZNl6Sz0Ihk29kA9i8Q1j0e1URFb7v7etnQpxI,1511
-openlit/instrumentation/transformers/transformers.py,sha256=MHnHVo_6NP0gSIqxen6qQpCrZ0fs8Ec80EdZumMpVNo,1797
-openlit/instrumentation/transformers/utils.py,sha256=MMy_SyRyDI4X-0mqbBwStac0xabmw0ZRvv_VWLA_Nkg,8426
-openlit/instrumentation/vertexai/__init__.py,sha256=ti0LBtnXVN8cC8AFkn_52JSKamfR5AL9MdNac9u1R28,3640
-openlit/instrumentation/vertexai/async_vertexai.py,sha256=CxogAXam2ZUpFaIyWI3llaJn5bXAtKYm0L3dYEWu278,4865
-openlit/instrumentation/vertexai/utils.py,sha256=7B5bYcBNLNpt3uG64yG0UUMMyfaqSRVPDaeebppBMmk,8425
-openlit/instrumentation/vertexai/vertexai.py,sha256=tHcnpw4s0ugaxP4hG5q3BFnUtLBcoEuqARdwR-dmf3c,4775
-openlit/instrumentation/vllm/__init__.py,sha256=uaSzQmgDuKJ-sh61sfVdzVt2qAZaozZIQ8sbmQ0XpZE,1357
-openlit/instrumentation/vllm/utils.py,sha256=HuCPNBgChWg9vA7DHNFCij_y8qj27DjZxdZ0Nvdt2fg,5751
-openlit/instrumentation/vllm/vllm.py,sha256=VzazF2f4LLwjZDO_G8lIN_d622oSJM0fIO9wjxXbhyg,2004
-openlit/otel/events.py,sha256=VrMjTpvnLtYRBHCiFwJojTQqqNpRCxoD4yJYeQrtPsk,3560
-openlit/otel/metrics.py,sha256=GM2PDloBGRhBTkHHkYaqmOwIAQkY124ZhW4sEqW1Fgk,7086
-openlit/otel/tracing.py,sha256=tjV2bEbEDPUB1Z46gE-UsJsb04sRdFrfbhIDkxViZc0,3103
-openlit/semcov/__init__.py,sha256=7sj6u3FO2fwFZuwajGsStdFOl7Nh-mU84pvTkRrfKg8,20093
-openlit-1.34.30.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-openlit-1.34.30.dist-info/METADATA,sha256=qNUvSHkp_OXxSJJUeBIWgKMqu3S_-VKJhLaXtzOIRs8,23509
-openlit-1.34.30.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-openlit-1.34.30.dist-info/RECORD,,