openlit 1.32.9__py3-none-any.whl → 1.32.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- openlit/__init__.py +3 -0
- openlit/instrumentation/crawl4ai/__init__.py +52 -0
- openlit/instrumentation/crawl4ai/async_crawl4ai.py +104 -0
- openlit/instrumentation/crawl4ai/crawl4ai.py +104 -0
- openlit/instrumentation/dynamiq/dynamiq.py +2 -0
- openlit/semcov/__init__.py +3 -0
- {openlit-1.32.9.dist-info → openlit-1.32.10.dist-info}/METADATA +5 -5
- {openlit-1.32.9.dist-info → openlit-1.32.10.dist-info}/RECORD +10 -7
- {openlit-1.32.9.dist-info → openlit-1.32.10.dist-info}/LICENSE +0 -0
- {openlit-1.32.9.dist-info → openlit-1.32.10.dist-info}/WHEEL +0 -0
openlit/__init__.py
CHANGED
@@ -60,6 +60,7 @@ from openlit.instrumentation.phidata import PhidataInstrumentor
|
|
60
60
|
from openlit.instrumentation.julep import JulepInstrumentor
|
61
61
|
from openlit.instrumentation.ai21 import AI21Instrumentor
|
62
62
|
from openlit.instrumentation.controlflow import ControlFlowInstrumentor
|
63
|
+
from openlit.instrumentation.crawl4ai import Crawl4AIInstrumentor
|
63
64
|
from openlit.instrumentation.gpu import GPUInstrumentor
|
64
65
|
import openlit.guard
|
65
66
|
import openlit.evals
|
@@ -258,6 +259,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
|
|
258
259
|
"ai21": "ai21",
|
259
260
|
"controlflow": "controlflow",
|
260
261
|
"assemblyai": "assemblyai",
|
262
|
+
"crawl4ai": "crawl4ai",
|
261
263
|
}
|
262
264
|
|
263
265
|
invalid_instrumentors = [
|
@@ -351,6 +353,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
|
|
351
353
|
"ai21": AI21Instrumentor(),
|
352
354
|
"controlflow": ControlFlowInstrumentor(),
|
353
355
|
"assemblyai": AssemblyAIInstrumentor(),
|
356
|
+
"crawl4ai": Crawl4AIInstrumentor(),
|
354
357
|
}
|
355
358
|
|
356
359
|
# Initialize and instrument only the enabled instrumentors
|
@@ -0,0 +1,52 @@
|
|
1
|
+
# pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
|
2
|
+
"""Initializer of Auto Instrumentation of Crawl4AI Functions"""
|
3
|
+
|
4
|
+
from typing import Collection
|
5
|
+
import importlib.metadata
|
6
|
+
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
|
7
|
+
from wrapt import wrap_function_wrapper
|
8
|
+
|
9
|
+
from openlit.instrumentation.crawl4ai.crawl4ai import (
|
10
|
+
wrap_crawl
|
11
|
+
)
|
12
|
+
from openlit.instrumentation.crawl4ai.async_crawl4ai import (
|
13
|
+
async_wrap_crawl
|
14
|
+
)
|
15
|
+
|
16
|
+
_instruments = ("crawl4ai >= 0.4.0",)
|
17
|
+
|
18
|
+
class Crawl4AIInstrumentor(BaseInstrumentor):
    """
    An instrumentor for crawl4ai's client library.

    Patches both the synchronous ``WebCrawler.run`` and the asynchronous
    ``AsyncWebCrawler.arun`` entry points so that every crawl produces an
    OpenTelemetry span.
    """

    def instrumentation_dependencies(self) -> Collection[str]:
        """Return the package requirement this instrumentor depends on."""
        return _instruments

    def _instrument(self, **kwargs):
        """Apply the telemetry wrappers to crawl4ai's crawl methods."""
        version = importlib.metadata.version("crawl4ai")

        # Shared positional arguments for both wrapper factories, in the
        # order (endpoint, version, environment, application_name, tracer,
        # pricing_info, trace_content, metrics, disable_metrics).
        common_args = (
            "crawl4ai.web_crawl",
            version,
            kwargs.get("environment", "default_environment"),
            kwargs.get("application_name", "default_application"),
            kwargs.get("tracer"),
            kwargs.get("pricing_info", {}),
            kwargs.get("trace_content", False),
            kwargs.get("metrics_dict"),
            kwargs.get("disable_metrics"),
        )

        # Patch the synchronous crawler entry point.
        wrap_function_wrapper(
            "crawl4ai.web_crawler",
            "WebCrawler.run",
            wrap_crawl(*common_args),
        )

        # Patch the asynchronous crawler entry point.
        wrap_function_wrapper(
            "crawl4ai.async_webcrawler",
            "AsyncWebCrawler.arun",
            async_wrap_crawl(*common_args),
        )

    def _uninstrument(self, **kwargs):
        # Proper uninstrumentation logic to revert patched methods
        pass
|
@@ -0,0 +1,104 @@
|
|
1
|
+
# pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument
|
2
|
+
"""
|
3
|
+
Module for monitoring Crawl4AI calls.
|
4
|
+
"""
|
5
|
+
|
6
|
+
import logging
|
7
|
+
from opentelemetry.trace import SpanKind, Status, StatusCode
|
8
|
+
from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
|
9
|
+
from openlit.__helpers import (
|
10
|
+
handle_exception,
|
11
|
+
)
|
12
|
+
from openlit.semcov import SemanticConvetion
|
13
|
+
|
14
|
+
# Initialize logger for logging potential issues and operations
|
15
|
+
logger = logging.getLogger(__name__)
|
16
|
+
|
17
|
+
def async_wrap_crawl(gen_ai_endpoint, version, environment, application_name,
                     tracer, pricing_info, trace_content, metrics, disable_metrics):
    """
    Generates a telemetry wrapper for async crawl operations to collect metrics.

    Args:
        gen_ai_endpoint: Endpoint identifier for logging and tracing.
        version: Version of the monitoring package.
        environment: Deployment environment (e.g., production, staging).
        application_name: Name of the application using the Crawl4AI Agent.
        tracer: OpenTelemetry tracer for creating spans.
        pricing_info: Information used for calculating the cost of Crawl4AI usage.
        trace_content: Flag indicating whether to trace the actual content.
        metrics: Metrics dictionary (currently unused by this wrapper).
        disable_metrics: Flag to disable metrics collection (currently unused).

    Returns:
        A coroutine function that wraps ``AsyncWebCrawler.arun`` to add telemetry.
    """

    async def wrapper(wrapped, instance, args, kwargs):
        """
        Wraps the 'AsyncWebCrawler.arun' call to add telemetry.

        Records the crawled URL, cache usage, and extraction strategy details
        on a CLIENT span, and handles errors gracefully so the original
        response is always returned.

        Args:
            wrapped: The original 'arun' method to be wrapped.
            instance: The instance of the class where the original method is defined.
            args: Positional arguments for the 'arun' method.
            kwargs: Keyword arguments for the 'arun' method.

        Returns:
            The response from the original 'arun' method.
        """

        # pylint: disable=line-too-long
        with tracer.start_as_current_span(gen_ai_endpoint, kind=SpanKind.CLIENT) as span:
            response = await wrapped(*args, **kwargs)

            try:
                # Set base span attributes
                span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
                span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
                                   SemanticConvetion.GEN_AI_SYSTEM_CRAWL4AI)
                span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
                                   SemanticConvetion.GEN_AI_TYPE_AGENT)
                span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
                                   gen_ai_endpoint)
                span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
                                   application_name)
                span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
                                   environment)
                span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
                                   "browser")
                span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ENABLE_CACHE,
                                   not kwargs.get("disable_cache", False))

                # URL may be passed by keyword or as the first positional argument.
                url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None
                if url is not None:
                    span.set_attribute(SemanticConvetion.GEN_AI_AGENT_BROWSE_URL, url)

                extraction_strategy = kwargs.get("extraction_strategy", "NoExtractionStrategy")
                # Strategy may be an object exposing `.name` or a plain string.
                extraction_name = extraction_strategy.name if hasattr(extraction_strategy, 'name') else extraction_strategy

                span.set_attribute(SemanticConvetion.GEN_AI_AGENT_STRATEGY, extraction_name)

                if extraction_name == "LLMExtractionStrategy" and hasattr(extraction_strategy, 'provider'):
                    # Provider is formatted as "<provider>/<model>". Split only on the
                    # first '/' so model identifiers that themselves contain '/'
                    # (e.g. "openrouter/org/model") don't raise ValueError.
                    _, llm_model = extraction_strategy.provider.split('/', 1)
                    span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, llm_model)

                elif extraction_name == "CosineStrategy":
                    # CosineStrategy embeds with a fixed sentence-transformers model.
                    span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, "all-MiniLM-L6-v2")

                elif extraction_name == "JsonCssExtractionStrategy" and hasattr(extraction_strategy, 'schema'):
                    span.set_attribute(SemanticConvetion.GEN_AI_AGENT_SCHEMA, str(extraction_strategy.schema))

                span.set_status(Status(StatusCode.OK))

                # Return original response
                return response

            except Exception as e:
                handle_exception(span, e)
                logger.error("Error in trace creation: %s", e)

                # Return original response
                return response

    return wrapper
|
@@ -0,0 +1,104 @@
|
|
1
|
+
# pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument
|
2
|
+
"""
|
3
|
+
Module for monitoring Crawl4AI calls.
|
4
|
+
"""
|
5
|
+
|
6
|
+
import logging
|
7
|
+
from opentelemetry.trace import SpanKind, Status, StatusCode
|
8
|
+
from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
|
9
|
+
from openlit.__helpers import (
|
10
|
+
handle_exception,
|
11
|
+
)
|
12
|
+
from openlit.semcov import SemanticConvetion
|
13
|
+
|
14
|
+
# Initialize logger for logging potential issues and operations
|
15
|
+
logger = logging.getLogger(__name__)
|
16
|
+
|
17
|
+
def wrap_crawl(gen_ai_endpoint, version, environment, application_name,
               tracer, pricing_info, trace_content, metrics, disable_metrics):
    """
    Generates a telemetry wrapper for crawl operations to collect metrics.

    Args:
        gen_ai_endpoint: Endpoint identifier for logging and tracing.
        version: Version of the monitoring package.
        environment: Deployment environment (e.g., production, staging).
        application_name: Name of the application using the Crawl4AI Agent.
        tracer: OpenTelemetry tracer for creating spans.
        pricing_info: Information used for calculating the cost of Crawl4AI usage.
        trace_content: Flag indicating whether to trace the actual content.
        metrics: Metrics dictionary (currently unused by this wrapper).
        disable_metrics: Flag to disable metrics collection (currently unused).

    Returns:
        A function that wraps ``WebCrawler.run`` to add telemetry.
    """

    def wrapper(wrapped, instance, args, kwargs):
        """
        Wraps the 'WebCrawler.run' call to add telemetry.

        Records the crawled URL, cache usage, and extraction strategy details
        on a CLIENT span, and handles errors gracefully so the original
        response is always returned.

        Args:
            wrapped: The original 'run' method to be wrapped.
            instance: The instance of the class where the original method is defined.
            args: Positional arguments for the 'run' method.
            kwargs: Keyword arguments for the 'run' method.

        Returns:
            The response from the original 'run' method.
        """

        # pylint: disable=line-too-long
        with tracer.start_as_current_span(gen_ai_endpoint, kind=SpanKind.CLIENT) as span:
            response = wrapped(*args, **kwargs)

            try:
                # Set base span attributes
                span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
                span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
                                   SemanticConvetion.GEN_AI_SYSTEM_CRAWL4AI)
                span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
                                   SemanticConvetion.GEN_AI_TYPE_AGENT)
                span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
                                   gen_ai_endpoint)
                span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
                                   application_name)
                span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
                                   environment)
                span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
                                   "browser")
                span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ENABLE_CACHE,
                                   not kwargs.get("disable_cache", False))

                # URL may be passed by keyword or as the first positional argument.
                url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None
                if url is not None:
                    span.set_attribute(SemanticConvetion.GEN_AI_AGENT_BROWSE_URL, url)

                extraction_strategy = kwargs.get("extraction_strategy", "NoExtractionStrategy")
                # Strategy may be an object exposing `.name` or a plain string.
                extraction_name = extraction_strategy.name if hasattr(extraction_strategy, 'name') else extraction_strategy

                span.set_attribute(SemanticConvetion.GEN_AI_AGENT_STRATEGY, extraction_name)

                if extraction_name == "LLMExtractionStrategy" and hasattr(extraction_strategy, 'provider'):
                    # Provider is formatted as "<provider>/<model>". Split only on the
                    # first '/' so model identifiers that themselves contain '/'
                    # (e.g. "openrouter/org/model") don't raise ValueError.
                    _, llm_model = extraction_strategy.provider.split('/', 1)
                    span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, llm_model)

                elif extraction_name == "CosineStrategy":
                    # CosineStrategy embeds with a fixed sentence-transformers model.
                    span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, "all-MiniLM-L6-v2")

                elif extraction_name == "JsonCssExtractionStrategy" and hasattr(extraction_strategy, 'schema'):
                    span.set_attribute(SemanticConvetion.GEN_AI_AGENT_SCHEMA, str(extraction_strategy.schema))

                span.set_status(Status(StatusCode.OK))

                # Return original response
                return response

            except Exception as e:
                handle_exception(span, e)
                logger.error("Error in trace creation: %s", e)

                # Return original response
                return response

    return wrapper
|
@@ -64,6 +64,8 @@ def dynamiq_wrap(gen_ai_endpoint, version, environment, application_name,
|
|
64
64
|
gen_ai_endpoint)
|
65
65
|
span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
|
66
66
|
application_name)
|
67
|
+
span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
|
68
|
+
environment)
|
67
69
|
|
68
70
|
if gen_ai_endpoint == "dynamiq.agent_run":
|
69
71
|
span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ID,
|
openlit/semcov/__init__.py
CHANGED
@@ -131,6 +131,7 @@ class SemanticConvetion:
|
|
131
131
|
GEN_AI_SYSTEM_AI21 = "ai21"
|
132
132
|
GEN_AI_SYSTEM_CONTROLFLOW = "controlflow"
|
133
133
|
GEN_AI_SYSTEM_ASSEMBLYAI = "assemblyai"
|
134
|
+
GEN_AI_SYSTEM_CRAWL4AI = "crawl4ai"
|
134
135
|
|
135
136
|
# Vector DB
|
136
137
|
DB_OPERATION_API_ENDPOINT = "db.operation.api_endpoint"
|
@@ -204,10 +205,12 @@ class SemanticConvetion:
|
|
204
205
|
GEN_AI_AGENT_EXPECTED_OUTPUT = "gen_ai.agent.expected_output"
|
205
206
|
GEN_AI_AGENT_ACTUAL_OUTPUT = "gen_ai.agent.actual_output"
|
206
207
|
GEN_AI_AGENT_HUMAN_INPUT = "gen_ai.agent.human_input"
|
208
|
+
GEN_AI_AGENT_SCHEMA = "gen_ai.agent.schema"
|
207
209
|
GEN_AI_AGENT_TASK_ASSOCIATION = "gen_ai.agent.task_associations"
|
208
210
|
GEN_AI_AGENT_BROWSE_URL = "gen_ai.agent.browse_url"
|
209
211
|
GEN_AI_AGENT_STEP_COUNT = "gen_ai.agent.step_count"
|
210
212
|
GEN_AI_AGENT_RESPONSE_TIME = "gen_ai.agent.response_time"
|
213
|
+
GEN_AI_AGENT_STRATEGY = "gen_ai.agent.strategy"
|
211
214
|
|
212
215
|
# GPU
|
213
216
|
GPU_INDEX = "gpu.index"
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: openlit
|
3
|
-
Version: 1.32.
|
3
|
+
Version: 1.32.10
|
4
4
|
Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
|
5
5
|
Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
|
6
6
|
Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
|
@@ -69,7 +69,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
|
|
69
69
|
| [✅ Ollama](https://docs.openlit.io/latest/integrations/ollama) | [✅ Pinecone](https://docs.openlit.io/latest/integrations/pinecone) | [✅ LiteLLM](https://docs.openlit.io/latest/integrations/litellm) | [✅ AMD](https://docs.openlit.io/latest/integrations/amd-gpu) |
|
70
70
|
| [✅ Anthropic](https://docs.openlit.io/latest/integrations/anthropic) | [✅ Qdrant](https://docs.openlit.io/latest/integrations/qdrant) | [✅ LlamaIndex](https://docs.openlit.io/latest/integrations/llama-index) | |
|
71
71
|
| [✅ GPT4All](https://docs.openlit.io/latest/integrations/gpt4all) | [✅ Milvus](https://docs.openlit.io/latest/integrations/milvus) | [✅ Haystack](https://docs.openlit.io/latest/integrations/haystack) | |
|
72
|
-
| [✅ Cohere](https://docs.openlit.io/latest/integrations/cohere) | [✅ AstraDB](https://docs.openlit.io/latest/integrations/astradb)
|
72
|
+
| [✅ Cohere](https://docs.openlit.io/latest/integrations/cohere) | [✅ AstraDB](https://docs.openlit.io/latest/integrations/astradb) | [✅ EmbedChain](https://docs.openlit.io/latest/integrations/embedchain) | |
|
73
73
|
| [✅ Mistral](https://docs.openlit.io/latest/integrations/mistral) | | [✅ Guardrails](https://docs.openlit.io/latest/integrations/guardrails) | |
|
74
74
|
| [✅ Azure OpenAI](https://docs.openlit.io/latest/integrations/azure-openai) | | [✅ CrewAI](https://docs.openlit.io/latest/integrations/crewai) | |
|
75
75
|
| [✅ Azure AI Inference](https://docs.openlit.io/latest/integrations/azure-ai-inference) | | [✅ DSPy](https://docs.openlit.io/latest/integrations/dspy) | |
|
@@ -80,7 +80,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
|
|
80
80
|
| [✅ Vertex AI](https://docs.openlit.io/latest/integrations/vertexai) | | [✅ MultiOn](https://docs.openlit.io/latest/integrations/multion) | |
|
81
81
|
| [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
|
82
82
|
| [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
|
83
|
-
| [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | |
|
83
|
+
| [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | [✅ Crawl4AI](https://docs.openlit.io/latest/integrations/crawl4ai) | |
|
84
84
|
| [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
|
85
85
|
| [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
|
86
86
|
| [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
|
@@ -88,7 +88,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
|
|
88
88
|
| [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
|
89
89
|
| [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
|
90
90
|
| [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |
|
91
|
-
| [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai)
|
91
|
+
| [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
|
92
92
|
|
93
93
|
## Supported Destinations
|
94
94
|
- [✅ OpenTelemetry Collector](https://docs.openlit.io/latest/connections/otelcol)
|
@@ -260,7 +260,7 @@ We are dedicated to continuously improving OpenLIT SDKs. Here's a look at what's
|
|
260
260
|
| [OpenTelemetry-native auto-instrumentation for NVIDIA GPU Monitoring](https://docs.openlit.io/latest/features/gpu) | ✅ Completed |
|
261
261
|
| [Real-Time Guardrails Implementation](https://docs.openlit.io/latest/features/guardrails) | ✅ Completed |
|
262
262
|
| [Programmatic Evaluation for LLM Response](https://docs.openlit.io/latest/features/evaluations) | ✅ Completed |
|
263
|
-
| [
|
263
|
+
| [OpenTelemetry-native AI Agent Observability]() | ✅ Completed |
|
264
264
|
|
265
265
|
|
266
266
|
## 🌱 Contributing
|
@@ -1,5 +1,5 @@
|
|
1
1
|
openlit/__helpers.py,sha256=bqMxdNndLW5NGO2wwpAoHEOnAFr_mhnmVLua3ifpSEc,6427
|
2
|
-
openlit/__init__.py,sha256=
|
2
|
+
openlit/__init__.py,sha256=M9Ajc9c4iTOrXf3Mv7BowZah6h0quRZMqSeIYu79n5Y,21590
|
3
3
|
openlit/evals/__init__.py,sha256=nJe99nuLo1b5rf7pt9U9BCdSDedzbVi2Fj96cgl7msM,380
|
4
4
|
openlit/evals/all.py,sha256=oWrue3PotE-rB5WePG3MRYSA-ro6WivkclSHjYlAqGs,7154
|
5
5
|
openlit/evals/bias_detection.py,sha256=mCdsfK7x1vX7S3psC3g641IMlZ-7df3h-V6eiICj5N8,8154
|
@@ -36,10 +36,13 @@ openlit/instrumentation/cohere/__init__.py,sha256=PC5T1qIg9pwLNocBP_WjG5B_6p_z01
|
|
36
36
|
openlit/instrumentation/cohere/cohere.py,sha256=62-P2K39v6pIJme6vTVViLJ9PP8q_UWkTv2l3Wa2gHA,21217
|
37
37
|
openlit/instrumentation/controlflow/__init__.py,sha256=iKZ08IANfoN_n4o1TZJIK_C_t6RZQ6AS1H7kMfyBbYA,2118
|
38
38
|
openlit/instrumentation/controlflow/controlflow.py,sha256=DP4KWBzcVg-zeCb4C6r-hK9_LdDzWNPBsOjbK-5WRqY,5528
|
39
|
+
openlit/instrumentation/crawl4ai/__init__.py,sha256=CGkcbQijpKae_8GD_1ybDnCCk0MVu2AdV-ppFOg8mAA,1907
|
40
|
+
openlit/instrumentation/crawl4ai/async_crawl4ai.py,sha256=YwAnKaLUg6BK72q6wbFGEQT2GSyITNhzdC5B1MP4QXw,4815
|
41
|
+
openlit/instrumentation/crawl4ai/crawl4ai.py,sha256=D_i_wqOa86KC6XMPXTHLs-HhMPL9yJ9GCroq0wY4HFc,4797
|
39
42
|
openlit/instrumentation/crewai/__init__.py,sha256=cETkkwnKYEMAKlMrHbZ9-RvcRUPYaSNqNIhy2-vCDK8,1794
|
40
43
|
openlit/instrumentation/crewai/crewai.py,sha256=mpEJql6aDs3wwBjLz686anOHkIA5gWfhFCCHAgJRY0w,7049
|
41
44
|
openlit/instrumentation/dynamiq/__init__.py,sha256=2uIHHxFWca0g2YLO2RBfi2Al6uWUYvVZBfDiPOHCdpQ,2331
|
42
|
-
openlit/instrumentation/dynamiq/dynamiq.py,sha256=
|
45
|
+
openlit/instrumentation/dynamiq/dynamiq.py,sha256=xbJlSLhiWChR7kJ6BOjcIM4WUGqI0n_JUFpdPPgK99k,5250
|
43
46
|
openlit/instrumentation/elevenlabs/__init__.py,sha256=BZjAe-kzFJpKxT0tKksXVfZgirvgEp8qM3SfegWU5co,2631
|
44
47
|
openlit/instrumentation/elevenlabs/async_elevenlabs.py,sha256=yMYACh95SFr5EYklKnXw2DrPFa3iIgM4qQMWjO1itMU,5690
|
45
48
|
openlit/instrumentation/elevenlabs/elevenlabs.py,sha256=mFnD7sgT47OxaXJz0Vc1nrNjXEpcGQDj5run3gA48Lw,6089
|
@@ -105,8 +108,8 @@ openlit/instrumentation/vllm/__init__.py,sha256=OVWalQ1dXvip1DUsjUGaHX4J-2FrSp-T
|
|
105
108
|
openlit/instrumentation/vllm/vllm.py,sha256=lDzM7F5pgxvh8nKL0dcKB4TD0Mc9wXOWeXOsOGN7Wd8,6527
|
106
109
|
openlit/otel/metrics.py,sha256=y7SQDTyfLakMrz0V4DThN-WAeap7YZzyndeYGSP6nVg,4516
|
107
110
|
openlit/otel/tracing.py,sha256=fG3vl-flSZ30whCi7rrG25PlkIhhr8PhnfJYCkZzCD0,3895
|
108
|
-
openlit/semcov/__init__.py,sha256=
|
109
|
-
openlit-1.32.
|
110
|
-
openlit-1.32.
|
111
|
-
openlit-1.32.
|
112
|
-
openlit-1.32.
|
111
|
+
openlit/semcov/__init__.py,sha256=xCbAepANEnXzC8TTMM50l1VOc5iNZ6YXAy7rTw-KmXY,10768
|
112
|
+
openlit-1.32.10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
|
113
|
+
openlit-1.32.10.dist-info/METADATA,sha256=cH4MIdfCaxSfhKi4FsCFCUy0tcCtCkdORpxRgYdyhpA,22677
|
114
|
+
openlit-1.32.10.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
|
115
|
+
openlit-1.32.10.dist-info/RECORD,,
|
File without changes
|
File without changes
|