openlit 1.32.10__py3-none-any.whl → 1.32.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
openlit/__init__.py CHANGED
@@ -61,6 +61,8 @@ from openlit.instrumentation.julep import JulepInstrumentor
  from openlit.instrumentation.ai21 import AI21Instrumentor
  from openlit.instrumentation.controlflow import ControlFlowInstrumentor
  from openlit.instrumentation.crawl4ai import Crawl4AIInstrumentor
+ from openlit.instrumentation.firecrawl import FireCrawlInstrumentor
+ from openlit.instrumentation.letta import LettaInstrumentor
  from openlit.instrumentation.gpu import GPUInstrumentor
  import openlit.guard
  import openlit.evals
@@ -260,6 +262,8 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
          "controlflow": "controlflow",
          "assemblyai": "assemblyai",
          "crawl4ai": "crawl4ai",
+         "firecrawl": "firecrawl",
+         "letta": "letta",
      }

      invalid_instrumentors = [
@@ -354,6 +358,8 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
          "controlflow": ControlFlowInstrumentor(),
          "assemblyai": AssemblyAIInstrumentor(),
          "crawl4ai": Crawl4AIInstrumentor(),
+         "firecrawl": FireCrawlInstrumentor(),
+         "letta": LettaInstrumentor(),
      }

      # Initialize and instrument only the enabled instrumentors
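Taken together, the hunks above register the two new integrations next to the existing ones, so openlit.init() patches FireCrawl and Letta whenever the corresponding client packages are installed. A minimal usage sketch under that assumption; the FirecrawlApp constructor arguments and the params payload follow firecrawl-py's documented conventions and are not part of this diff:

    import openlit
    from firecrawl import FirecrawlApp

    # Enable OpenLIT; application_name and environment are the init() parameters shown above.
    openlit.init(application_name="doc-crawler", environment="staging")

    # FirecrawlApp.scrape_url is now wrapped, so this call emits a "firecrawl.scrape_url"
    # CLIENT span carrying the agent attributes set by wrap_crawl.
    app = FirecrawlApp(api_key="fc-...")  # constructor signature assumed from firecrawl-py docs
    app.scrape_url("https://example.com", params={"formats": ["markdown"]})  # params value is illustrative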

openlit/instrumentation/crawl4ai/async_crawl4ai.py CHANGED
@@ -67,7 +67,7 @@ def async_wrap_crawl(gen_ai_endpoint, version, environment, application_name,
                  span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
                                     environment)
                  span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
-                                    "browser")
+                                    SemanticConvetion.GEN_AI_AGENT_TYPE_BROWSER)
                  span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ENABLE_CACHE, not kwargs.get("disable_cache", False))

                  url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None

openlit/instrumentation/crawl4ai/crawl4ai.py CHANGED
@@ -67,7 +67,7 @@ def wrap_crawl(gen_ai_endpoint, version, environment, application_name,
                  span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
                                     environment)
                  span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
-                                    "browser")
+                                    SemanticConvetion.GEN_AI_AGENT_TYPE_BROWSER)
                  span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ENABLE_CACHE, not kwargs.get("disable_cache", False))

                  url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None

openlit/instrumentation/firecrawl/__init__.py ADDED
@@ -0,0 +1,49 @@
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
+ """Initializer of Auto Instrumentation of FireCrawl Functions"""
+
+ from typing import Collection
+ import importlib.metadata
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from wrapt import wrap_function_wrapper
+
+ from openlit.instrumentation.firecrawl.firecrawl import (
+     wrap_crawl
+ )
+
+ _instruments = ("firecrawl-py >= 1.6.3",)
+
+ class FireCrawlInstrumentor(BaseInstrumentor):
+     """
+     An instrumentor for firecrawl's client library.
+     """
+
+     def instrumentation_dependencies(self) -> Collection[str]:
+         return _instruments
+
+     def _instrument(self, **kwargs):
+         application_name = kwargs.get("application_name", "default_application")
+         environment = kwargs.get("environment", "default_environment")
+         tracer = kwargs.get("tracer")
+         metrics = kwargs.get("metrics_dict")
+         pricing_info = kwargs.get("pricing_info", {})
+         trace_content = kwargs.get("trace_content", False)
+         disable_metrics = kwargs.get("disable_metrics")
+         version = importlib.metadata.version("firecrawl-py")
+
+         wrap_function_wrapper(
+             "firecrawl.firecrawl",
+             "FirecrawlApp.scrape_url",
+             wrap_crawl("firecrawl.scrape_url", version, environment, application_name,
+                        tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "firecrawl.firecrawl",
+             "FirecrawlApp.crawl_url",
+             wrap_crawl("firecrawl.crawl_url", version, environment, application_name,
+                        tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+     def _uninstrument(self, **kwargs):
+         # Proper uninstrumentation logic to revert patched methods
+         pass

openlit/instrumentation/firecrawl/firecrawl.py ADDED
@@ -0,0 +1,90 @@
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument
+ """
+ Module for monitoring FireCrawl calls.
+ """
+
+ import logging
+ from opentelemetry.trace import SpanKind, Status, StatusCode
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+ from openlit.__helpers import (
+     handle_exception,
+ )
+ from openlit.semcov import SemanticConvetion
+
+ # Initialize logger for logging potential issues and operations
+ logger = logging.getLogger(__name__)
+
+ def wrap_crawl(gen_ai_endpoint, version, environment, application_name,
+                tracer, pricing_info, trace_content, metrics, disable_metrics):
+     """
+     Generates a telemetry wrapper for chat completions to collect metrics.
+
+     Args:
+         gen_ai_endpoint: Endpoint identifier for logging and tracing.
+         version: Version of the monitoring package.
+         environment: Deployment environment (e.g., production, staging).
+         application_name: Name of the application using the FireCrawl Agent.
+         tracer: OpenTelemetry tracer for creating spans.
+         pricing_info: Information used for calculating the cost of FireCrawl usage.
+         trace_content: Flag indicating whether to trace the actual content.
+
+     Returns:
+         A function that wraps the chat completions method to add telemetry.
+     """
+
+     def wrapper(wrapped, instance, args, kwargs):
+         """
+         Wraps the API call to add telemetry.
+
+         This collects metrics such as execution time, cost, and token usage, and handles errors
+         gracefully, adding details to the trace for observability.
+
+         Args:
+             wrapped: The original method to be wrapped.
+             instance: The instance of the class where the original method is defined.
+             args: Positional arguments for the method.
+             kwargs: Keyword arguments for the method.
+
+         Returns:
+             The response from the original method.
+         """
+
+         # pylint: disable=line-too-long
+         with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+             response = wrapped(*args, **kwargs)
+
+             try:
+                 # Set base span attribues
+                 span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+                 span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+                                    SemanticConvetion.GEN_AI_SYSTEM_FIRECRAWL)
+                 span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+                                    SemanticConvetion.GEN_AI_TYPE_AGENT)
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+                                    gen_ai_endpoint)
+                 span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                                    application_name)
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                                    environment)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
+                                    SemanticConvetion.GEN_AI_AGENT_TYPE_BROWSER)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_PARAMS,
+                                    str(kwargs.get("params")))
+
+                 url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None
+                 if url is not None:
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_BROWSE_URL, url)
+
+                 span.set_status(Status(StatusCode.OK))
+
+                 # Return original response
+                 return response
+
+             except Exception as e:
+                 handle_exception(span, e)
+                 logger.error("Error in trace creation: %s", e)
+
+                 # Return original response
+                 return response
+
+     return wrapper
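
Both FireCrawl files rely on the same pattern: wrap_crawl is a factory that is called once at instrumentation time and returns the closure that wrapt installs over the target method. A stripped-down sketch of that pattern, using a stand-in target (json.dumps) and hypothetical names rather than the firecrawl internals:

    import wrapt

    def make_wrapper(endpoint):
        # Factory: bakes configuration into the closure, mirroring wrap_crawl above.
        def wrapper(wrapped, instance, args, kwargs):
            print(f"before {endpoint}")
            result = wrapped(*args, **kwargs)  # call the original method
            print(f"after {endpoint}")
            return result
        return wrapper

    # Patch json.dumps as a stand-in; the instrumentor above targets
    # "firecrawl.firecrawl" / "FirecrawlApp.scrape_url" in the same way.
    wrapt.wrap_function_wrapper("json", "dumps", make_wrapper("demo.dumps"))

    import json
    json.dumps({"ok": True})  # prints before/after, then returns the JSON string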

openlit/instrumentation/letta/__init__.py ADDED
@@ -0,0 +1,77 @@
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
+ """Initializer of Auto Instrumentation of Letta Functions"""
+
+ from typing import Collection
+ import importlib.metadata
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from wrapt import wrap_function_wrapper
+
+ from openlit.instrumentation.letta.letta import (
+     create_agent, send_message
+ )
+
+ _instruments = ("letta >= 0.6.2",)
+
+ class LettaInstrumentor(BaseInstrumentor):
+     """
+     An instrumentor for Letta's client library.
+     """
+
+     def instrumentation_dependencies(self) -> Collection[str]:
+         return _instruments
+
+     def _instrument(self, **kwargs):
+         application_name = kwargs.get("application_name", "default_application")
+         environment = kwargs.get("environment", "default_environment")
+         tracer = kwargs.get("tracer")
+         metrics = kwargs.get("metrics_dict")
+         pricing_info = kwargs.get("pricing_info", {})
+         trace_content = kwargs.get("trace_content", False)
+         disable_metrics = kwargs.get("disable_metrics")
+         version = importlib.metadata.version("letta")
+
+         wrap_function_wrapper(
+             "letta.client.client",
+             "LocalClient.create_agent",
+             create_agent("letta.create_agent", version, environment, application_name,
+                          tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "letta.client.client",
+             "LocalClient.get_agent",
+             create_agent("letta.get_agent", version, environment, application_name,
+                          tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "letta.client.client",
+             "LocalClient.send_message",
+             send_message("letta.send_message", version, environment, application_name,
+                          tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "letta.client.client",
+             "RESTClient.create_agent",
+             create_agent("letta.create_agent", version, environment, application_name,
+                          tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "letta.client.client",
+             "RESTClient.get_agent",
+             create_agent("letta.get_agent", version, environment, application_name,
+                          tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "letta.client.client",
+             "RESTClient.send_message",
+             send_message("letta.send_message", version, environment, application_name,
+                          tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+     def _uninstrument(self, **kwargs):
+         # Proper uninstrumentation logic to revert patched methods
+         pass

openlit/instrumentation/letta/letta.py ADDED
@@ -0,0 +1,186 @@
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument, too-many-branches
+ """
+ Module for monitoring Letta calls.
+ """
+
+ import logging
+ from opentelemetry.trace import SpanKind, Status, StatusCode
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+ from openlit.__helpers import (
+     handle_exception, get_chat_model_cost
+ )
+ from openlit.semcov import SemanticConvetion
+
+ # Initialize logger for logging potential issues and operations
+ logger = logging.getLogger(__name__)
+
+ def create_agent(gen_ai_endpoint, version, environment, application_name,
+                  tracer, pricing_info, trace_content, metrics, disable_metrics):
+     """
+     Generates a telemetry wrapper for chat completions to collect metrics.
+
+     Args:
+         gen_ai_endpoint: Endpoint identifier for logging and tracing.
+         version: Version of the monitoring package.
+         environment: Deployment environment (e.g., production, staging).
+         application_name: Name of the application using the Letta Agent.
+         tracer: OpenTelemetry tracer for creating spans.
+         pricing_info: Information used for calculating the cost of Letta usage.
+         trace_content: Flag indicating whether to trace the actual content.
+
+     Returns:
+         A function that wraps the chat completions method to add telemetry.
+     """
+
+     def wrapper(wrapped, instance, args, kwargs):
+         """
+         Wraps the API call to add telemetry.
+
+         This collects metrics such as execution time, cost, and token usage, and handles errors
+         gracefully, adding details to the trace for observability.
+
+         Args:
+             wrapped: The original method to be wrapped.
+             instance: The instance of the class where the original method is defined.
+             args: Positional arguments for the method.
+             kwargs: Keyword arguments for the method.
+
+         Returns:
+             The response from the original method.
+         """
+
+         # pylint: disable=line-too-long
+         with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+             response = wrapped(*args, **kwargs)
+
+             try:
+                 # Set base span attribues
+                 span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+                 span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+                                    SemanticConvetion.GEN_AI_SYSTEM_LETTA)
+                 span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+                                    SemanticConvetion.GEN_AI_TYPE_AGENT)
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+                                    gen_ai_endpoint)
+                 span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                                    application_name)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ID,
+                                    response.id)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ROLE,
+                                    response.name)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_INSTRUCTIONS,
+                                    response.system)
+                 span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
+                                    response.llm_config.model)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
+                                    response.agent_type)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TOOLS,
+                                    response.tool_names)
+
+                 span.set_status(Status(StatusCode.OK))
+
+                 # Return original response
+                 return response
+
+             except Exception as e:
+                 handle_exception(span, e)
+                 logger.error("Error in trace creation: %s", e)
+
+                 # Return original response
+                 return response
+
+     return wrapper
+
+ def send_message(gen_ai_endpoint, version, environment, application_name,
+                  tracer, pricing_info, trace_content, metrics, disable_metrics):
+     """
+     Generates a telemetry wrapper for chat completions to collect metrics.
+
+     Args:
+         gen_ai_endpoint: Endpoint identifier for logging and tracing.
+         version: Version of the monitoring package.
+         environment: Deployment environment (e.g., production, staging).
+         application_name: Name of the application using the Letta Agent.
+         tracer: OpenTelemetry tracer for creating spans.
+         pricing_info: Information used for calculating the cost of Letta usage.
+         trace_content: Flag indicating whether to trace the actual content.
+
+     Returns:
+         A function that wraps the chat completions method to add telemetry.
+     """
+
+     def wrapper(wrapped, instance, args, kwargs):
+         """
+         Wraps the API call to add telemetry.
+
+         This collects metrics such as execution time, cost, and token usage, and handles errors
+         gracefully, adding details to the trace for observability.
+
+         Args:
+             wrapped: The original method to be wrapped.
+             instance: The instance of the class where the original method is defined.
+             args: Positional arguments for the method.
+             kwargs: Keyword arguments for the method.
+
+         Returns:
+             The response from the original method.
+         """
+
+         # pylint: disable=line-too-long
+         with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+             response = wrapped(*args, **kwargs)
+
+             try:
+                 # Calculate cost of the operation
+                 cost = get_chat_model_cost(kwargs.get("model", "gpt-4o"),
+                                            pricing_info, response.usage.prompt_tokens,
+                                            response.usage.completion_tokens)
+                 # Set base span attribues
+                 span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+                 span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+                                    SemanticConvetion.GEN_AI_SYSTEM_LETTA)
+                 span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+                                    SemanticConvetion.GEN_AI_TYPE_AGENT)
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+                                    gen_ai_endpoint)
+                 span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                                    application_name)
+                 span.set_attribute(SemanticConvetion.GEN_AI_AGENT_STEP_COUNT,
+                                    response.usage.step_count)
+                 span.set_attribute(SemanticConvetion.GEN_AI_USAGE_PROMPT_TOKENS,
+                                    response.usage.prompt_tokens)
+                 span.set_attribute(SemanticConvetion.GEN_AI_USAGE_COMPLETION_TOKENS,
+                                    response.usage.completion_tokens)
+                 span.set_attribute(SemanticConvetion.GEN_AI_USAGE_TOTAL_TOKENS,
+                                    response.usage.total_tokens)
+                 span.set_attribute(SemanticConvetion.GEN_AI_USAGE_COST,
+                                    cost)
+
+                 if trace_content:
+                     span.add_event(
+                         name=SemanticConvetion.GEN_AI_CONTENT_PROMPT_EVENT,
+                         attributes={
+                             SemanticConvetion.GEN_AI_CONTENT_PROMPT: kwargs.get("message", ""),
+                         },
+                     )
+                     span.add_event(
+                         name=SemanticConvetion.GEN_AI_CONTENT_COMPLETION_EVENT,
+                         # pylint: disable=line-too-long
+                         attributes={
+                             SemanticConvetion.GEN_AI_CONTENT_COMPLETION: str(response.messages),
+                         },
+                     )
+
+                 span.set_status(Status(StatusCode.OK))
+
+                 # Return original response
+                 return response
+
+             except Exception as e:
+                 handle_exception(span, e)
+                 logger.error("Error in trace creation: %s", e)
+
+                 # Return original response
+                 return response
+
+     return wrapper
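
For context, the wrappers above are installed over both LocalClient and RESTClient in letta/__init__.py, so spans are produced whether the application talks to an in-process or a remote Letta server. A hypothetical end-to-end sketch, assuming Letta's documented create_client() helper and keyword arguments (none of which are defined in this diff):

    import openlit
    from letta import create_client  # import path assumed from Letta's docs

    openlit.init(application_name="letta-demo", environment="dev")

    client = create_client()  # returns a LocalClient, whose create_agent/send_message are wrapped
    agent = client.create_agent(name="support-agent")  # emits a "letta.create_agent" span

    # send_message kwargs are assumed; the wrapper reads kwargs["message"] and response.usage.*
    response = client.send_message(agent_id=agent.id, role="user", message="Hello!")
    print(response.usage.total_tokens)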

openlit/semcov/__init__.py CHANGED
@@ -132,6 +132,8 @@ class SemanticConvetion:
      GEN_AI_SYSTEM_CONTROLFLOW = "controlflow"
      GEN_AI_SYSTEM_ASSEMBLYAI = "assemblyai"
      GEN_AI_SYSTEM_CRAWL4AI = "crawl4ai"
+     GEN_AI_SYSTEM_FIRECRAWL = "firecrawl"
+     GEN_AI_SYSTEM_LETTA = "letta"

      # Vector DB
      DB_OPERATION_API_ENDPOINT = "db.operation.api_endpoint"
@@ -200,6 +202,7 @@ class SemanticConvetion:
      GEN_AI_AGENT_TOOLS = "gen_ai.agent.tools"
      GEN_AI_AGENT_TOOL_RESULTS = "gen_ai.agent.tool_results"
      GEN_AI_AGENT_TASK = "gen_ai.agent.task"
+     GEN_AI_AGENT_PARAMS = "gen_ai.agent.params"
      GEN_AI_AGENT_INSTRUCTIONS = "gen_ai.agent.instructions"
      GEN_AI_AGENT_STORAGE = "gen_ai.agent.storage"
      GEN_AI_AGENT_EXPECTED_OUTPUT = "gen_ai.agent.expected_output"
@@ -212,6 +215,8 @@ class SemanticConvetion:
      GEN_AI_AGENT_RESPONSE_TIME = "gen_ai.agent.response_time"
      GEN_AI_AGENT_STRATEGY = "gen_ai.agent.strategy"

+     GEN_AI_AGENT_TYPE_BROWSER = "browser"
+
      # GPU
      GPU_INDEX = "gpu.index"
      GPU_UUID = "gpu.uuid"

openlit-1.32.10.dist-info/METADATA → openlit-1.32.12.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: openlit
- Version: 1.32.10
+ Version: 1.32.12
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -81,8 +81,8 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
  | [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
  | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
  | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | [✅ Crawl4AI](https://docs.openlit.io/latest/integrations/crawl4ai) | |
- | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
- | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
+ | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | [✅ FireCrawl](https://docs.openlit.io/latest/integrations/firecrawl) | |
+ | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | [✅ Letta](https://docs.openlit.io/latest/integrations/letta) | |
  | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
  | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
  | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |

openlit-1.32.10.dist-info/RECORD → openlit-1.32.12.dist-info/RECORD RENAMED
@@ -1,5 +1,5 @@
  openlit/__helpers.py,sha256=bqMxdNndLW5NGO2wwpAoHEOnAFr_mhnmVLua3ifpSEc,6427
- openlit/__init__.py,sha256=M9Ajc9c4iTOrXf3Mv7BowZah6h0quRZMqSeIYu79n5Y,21590
+ openlit/__init__.py,sha256=VBmUj1L-HKJnSIV6Ga1YHPgxdLZbNPj6T6DDIkdvzOM,21870
  openlit/evals/__init__.py,sha256=nJe99nuLo1b5rf7pt9U9BCdSDedzbVi2Fj96cgl7msM,380
  openlit/evals/all.py,sha256=oWrue3PotE-rB5WePG3MRYSA-ro6WivkclSHjYlAqGs,7154
  openlit/evals/bias_detection.py,sha256=mCdsfK7x1vX7S3psC3g641IMlZ-7df3h-V6eiICj5N8,8154
@@ -37,8 +37,8 @@ openlit/instrumentation/cohere/cohere.py,sha256=62-P2K39v6pIJme6vTVViLJ9PP8q_UWk
  openlit/instrumentation/controlflow/__init__.py,sha256=iKZ08IANfoN_n4o1TZJIK_C_t6RZQ6AS1H7kMfyBbYA,2118
  openlit/instrumentation/controlflow/controlflow.py,sha256=DP4KWBzcVg-zeCb4C6r-hK9_LdDzWNPBsOjbK-5WRqY,5528
  openlit/instrumentation/crawl4ai/__init__.py,sha256=CGkcbQijpKae_8GD_1ybDnCCk0MVu2AdV-ppFOg8mAA,1907
- openlit/instrumentation/crawl4ai/async_crawl4ai.py,sha256=YwAnKaLUg6BK72q6wbFGEQT2GSyITNhzdC5B1MP4QXw,4815
- openlit/instrumentation/crawl4ai/crawl4ai.py,sha256=D_i_wqOa86KC6XMPXTHLs-HhMPL9yJ9GCroq0wY4HFc,4797
+ openlit/instrumentation/crawl4ai/async_crawl4ai.py,sha256=vuauTyE8IyuhmoZpIZsOl-fk8P0oEFMZ7bQK9qNtRKs,4849
+ openlit/instrumentation/crawl4ai/crawl4ai.py,sha256=yohlpUuJbGBYrz-Gxz0owkoaeR9OPzK8NML8D87GVMA,4831
  openlit/instrumentation/crewai/__init__.py,sha256=cETkkwnKYEMAKlMrHbZ9-RvcRUPYaSNqNIhy2-vCDK8,1794
  openlit/instrumentation/crewai/crewai.py,sha256=mpEJql6aDs3wwBjLz686anOHkIA5gWfhFCCHAgJRY0w,7049
  openlit/instrumentation/dynamiq/__init__.py,sha256=2uIHHxFWca0g2YLO2RBfi2Al6uWUYvVZBfDiPOHCdpQ,2331
@@ -48,6 +48,8 @@ openlit/instrumentation/elevenlabs/async_elevenlabs.py,sha256=yMYACh95SFr5EYklKn
  openlit/instrumentation/elevenlabs/elevenlabs.py,sha256=mFnD7sgT47OxaXJz0Vc1nrNjXEpcGQDj5run3gA48Lw,6089
  openlit/instrumentation/embedchain/__init__.py,sha256=8TYk1OEbz46yF19dr-gB_x80VZMagU3kJ8-QihPXTeA,1929
  openlit/instrumentation/embedchain/embedchain.py,sha256=SLlr7qieT3kp4M6OYSRy8FaVCXQ2t3oPyIiE99ioNE4,7892
+ openlit/instrumentation/firecrawl/__init__.py,sha256=2QTcpPoaximsApdp68WD2iYR1_vZnKlkbAd4RHhgeOo,1836
+ openlit/instrumentation/firecrawl/firecrawl.py,sha256=Tk5bfZuzSArYYpTB4u9FNyXh8hfioc8yq1mXrXhig_o,3786
  openlit/instrumentation/google_ai_studio/__init__.py,sha256=rhHbEJbDQ-nH8y3AXzzyqNxcunR0ZEqR2RIstM55-Ms,2159
  openlit/instrumentation/google_ai_studio/async_google_ai_studio.py,sha256=20MHsp-tAONxOtmCFg5WDvktTdRce5CyH3_9w0b_AqI,13587
  openlit/instrumentation/google_ai_studio/google_ai_studio.py,sha256=zEQgbqqUQ8nSnqXl7zkFBJrdcFudhiPqM5KV43PtfbU,13552
@@ -64,6 +66,8 @@ openlit/instrumentation/julep/async_julep.py,sha256=OO8lIm9uUV1lhPo_klKBVyaDwgHh
  openlit/instrumentation/julep/julep.py,sha256=lDUmkEP4hXk7vgUUbNRD-mnfdfrZifXSFVVILs8Ttkw,5276
  openlit/instrumentation/langchain/__init__.py,sha256=gVtPZJifx-H8rqdZlU3GXdy3NtRF8yVb7PW7gE-ddJk,3592
  openlit/instrumentation/langchain/langchain.py,sha256=XzZ3AH5Ep-UwMlIWVohXaGcZmuDYfUDcc4yeL4HTXvk,37860
+ openlit/instrumentation/letta/__init__.py,sha256=sjjOuMvZ1EPGEluNW0WTuSXYPhrb453cBIizt88Ta3g,2951
+ openlit/instrumentation/letta/letta.py,sha256=V_RLcGPy3Y9shxeDEtaDu7dHMnDWA08ijxWAZuQPQWg,8292
  openlit/instrumentation/litellm/__init__.py,sha256=Z-LsVHKJdPganHfJA_rWg7xAfQYkvLfpLdF-eckU4qY,2401
  openlit/instrumentation/litellm/async_litellm.py,sha256=1MKNZbvKaf1lFWbXi1MQy3qFNNeXawav34SDlOQ_H3w,27544
  openlit/instrumentation/litellm/litellm.py,sha256=4YqCQ4CEQ4sfDu7pTlnflL_AfUqYEQdJDTO7nHJ6noY,27450
@@ -108,8 +112,8 @@ openlit/instrumentation/vllm/__init__.py,sha256=OVWalQ1dXvip1DUsjUGaHX4J-2FrSp-T
  openlit/instrumentation/vllm/vllm.py,sha256=lDzM7F5pgxvh8nKL0dcKB4TD0Mc9wXOWeXOsOGN7Wd8,6527
  openlit/otel/metrics.py,sha256=y7SQDTyfLakMrz0V4DThN-WAeap7YZzyndeYGSP6nVg,4516
  openlit/otel/tracing.py,sha256=fG3vl-flSZ30whCi7rrG25PlkIhhr8PhnfJYCkZzCD0,3895
- openlit/semcov/__init__.py,sha256=xCbAepANEnXzC8TTMM50l1VOc5iNZ6YXAy7rTw-KmXY,10768
- openlit-1.32.10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- openlit-1.32.10.dist-info/METADATA,sha256=cH4MIdfCaxSfhKi4FsCFCUy0tcCtCkdORpxRgYdyhpA,22677
- openlit-1.32.10.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- openlit-1.32.10.dist-info/RECORD,,
+ openlit/semcov/__init__.py,sha256=F30Ki_08YjPrMe73kjp5sulC0qxHp9e-VExbzAOM1YI,10935
+ openlit-1.32.12.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ openlit-1.32.12.dist-info/METADATA,sha256=hHrHGhViseJ9Cj6kyqrmMSIeudiOoQ93Mrbj_mnF8rQ,22735
+ openlit-1.32.12.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ openlit-1.32.12.dist-info/RECORD,,