openlit 1.32.4__py3-none-any.whl → 1.32.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
openlit/__init__.py CHANGED
@@ -58,6 +58,7 @@ from openlit.instrumentation.dynamiq import DynamiqInstrumentor
  from openlit.instrumentation.phidata import PhidataInstrumentor
  from openlit.instrumentation.julep import JulepInstrumentor
  from openlit.instrumentation.ai21 import AI21Instrumentor
+ from openlit.instrumentation.controlflow import ControlFlowInstrumentor
  from openlit.instrumentation.gpu import GPUInstrumentor
  import openlit.guard
  import openlit.evals
@@ -254,6 +255,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
      "julep": "julep",
      "astra": "astrapy",
      "ai21": "ai21",
+     "controlflow": "controlflow",
  }
 
  invalid_instrumentors = [
@@ -345,6 +347,7 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
      "julep": JulepInstrumentor(),
      "astra": AstraInstrumentor(),
      "ai21": AI21Instrumentor(),
+     "controlflow": ControlFlowInstrumentor(),
  }
 
  # Initialize and instrument only the enabled instrumentors
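For context, a minimal usage sketch (not part of the diff): with the new "controlflow" key registered above, a plain openlit.init() call would auto-instrument ControlFlow when the controlflow package is installed. Only parameters visible in the init() signature in the hunk header are used; the values are illustrative.

import openlit

# The new ControlFlowInstrumentor is picked up automatically unless disabled;
# environment and application_name tag the resulting spans.
openlit.init(
    environment="development",
    application_name="demo-app",
)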
openlit/instrumentation/bedrock/bedrock.py CHANGED
@@ -102,12 +102,8 @@ def converse(gen_ai_endpoint, version, environment, application_name, tracer,
  content = message["content"]
 
  if isinstance(content, list):
-     content_str = ", ".join(
-         # pylint: disable=line-too-long
-         f'{item["type"]}: {item["text"] if "text" in item else item["image_url"]}'
-         if "type" in item else f'text: {item["text"]}'
-         for item in content
-     )
+     # pylint: disable=line-too-long
+     content_str = ", ".join(f'text: {item["text"]}' for item in content if "text" in item)
      formatted_messages.append(f"{role}: {content_str}")
  else:
      formatted_messages.append(f"{role}: {content}")
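Illustrative effect of the simplified Bedrock converse formatting above (a sketch, not from the diff): only items carrying a "text" key are kept in the traced message content, so non-text blocks are now dropped instead of being stringified. The image block shape below is an assumption for illustration.

content = [
    {"text": "Describe this image"},
    {"image": {"format": "png"}},  # assumed non-text block; skipped by the new code
]
content_str = ", ".join(f'text: {item["text"]}' for item in content if "text" in item)
print(content_str)  # -> text: Describe this image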
openlit/instrumentation/controlflow/__init__.py ADDED
@@ -0,0 +1,56 @@
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
+ """Initializer of Auto Instrumentation of controlflow Functions"""
+
+ from typing import Collection
+ import importlib.metadata
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from wrapt import wrap_function_wrapper
+
+ from openlit.instrumentation.controlflow.controlflow import (
+     wrap_controlflow
+ )
+
+ _instruments = ("controlflow >= 0.3.2",)
+
+ class ControlFlowInstrumentor(BaseInstrumentor):
+     """
+     An instrumentor for controlflow's client library.
+     """
+
+     def instrumentation_dependencies(self) -> Collection[str]:
+         return _instruments
+
+     def _instrument(self, **kwargs):
+         application_name = kwargs.get("application_name", "default_application")
+         environment = kwargs.get("environment", "default_environment")
+         tracer = kwargs.get("tracer")
+         metrics = kwargs.get("metrics_dict")
+         pricing_info = kwargs.get("pricing_info", {})
+         trace_content = kwargs.get("trace_content", False)
+         disable_metrics = kwargs.get("disable_metrics")
+         version = importlib.metadata.version("controlflow")
+
+         wrap_function_wrapper(
+             "controlflow.agents.agent",
+             "Agent.__init__",
+             wrap_controlflow("controlflow.create_agent", version, environment, application_name,
+                              tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "controlflow.tasks.task",
+             "Task.__init__",
+             wrap_controlflow("controlflow.create_task", version, environment, application_name,
+                              tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+         wrap_function_wrapper(
+             "controlflow",
+             "run",
+             wrap_controlflow("controlflow.run", version, environment, application_name,
+                              tracer, pricing_info, trace_content, metrics, disable_metrics),
+         )
+
+     def _uninstrument(self, **kwargs):
+         # Proper uninstrumentation logic to revert patched methods
+         pass
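A hedged sketch of the ControlFlow calls the three wrappers above would intercept (illustrative, not part of the diff; the exact argument shapes, including agents=, are assumptions about the controlflow API):

import controlflow as cf

agent = cf.Agent(name="writer", instructions="Write concisely")   # traced as controlflow.create_agent
task = cf.Task("Summarize the release notes", agents=[agent])     # traced as controlflow.create_task
result = cf.run("Summarize the release notes")                    # traced as controlflow.run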
openlit/instrumentation/controlflow/controlflow.py ADDED
@@ -0,0 +1,113 @@
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument, bare-except
+ """
+ Module for monitoring controlflow.
+ """
+
+ import logging
+ from opentelemetry.trace import SpanKind, Status, StatusCode
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+ from openlit.__helpers import handle_exception
+ from openlit.semcov import SemanticConvetion
+
+ # Initialize logger for logging potential issues and operations
+ logger = logging.getLogger(__name__)
+
+ def wrap_controlflow(gen_ai_endpoint, version, environment, application_name,
+                      tracer, pricing_info, trace_content, metrics, disable_metrics):
+     """
+     Creates a wrapper around a function call to trace and log its execution metrics.
+
+     This function wraps any given function to measure its execution time,
+     log its operation, and trace its execution using OpenTelemetry.
+
+     Parameters:
+     - gen_ai_endpoint (str): A descriptor or name for the endpoint being traced.
+     - version (str): The version of the Langchain application.
+     - environment (str): The deployment environment (e.g., 'production', 'development').
+     - application_name (str): Name of the Langchain application.
+     - tracer (opentelemetry.trace.Tracer): The tracer object used for OpenTelemetry tracing.
+     - pricing_info (dict): Information about the pricing for internal metrics (currently not used).
+     - trace_content (bool): Flag indicating whether to trace the content of the response.
+
+     Returns:
+     - function: A higher-order function that takes a function 'wrapped' and returns
+       a new function that wraps 'wrapped' with additional tracing and logging.
+     """
+
+     def wrapper(wrapped, instance, args, kwargs):
+         """
+         An inner wrapper function that executes the wrapped function, measures execution
+         time, and records trace data using OpenTelemetry.
+
+         Parameters:
+         - wrapped (Callable): The original function that this wrapper will execute.
+         - instance (object): The instance to which the wrapped function belongs. This
+                              is used for instance methods. For static and classmethods,
+                              this may be None.
+         - args (tuple): Positional arguments passed to the wrapped function.
+         - kwargs (dict): Keyword arguments passed to the wrapped function.
+
+         Returns:
+         - The result of the wrapped function call.
+
+         The wrapper initiates a span with the provided tracer, sets various attributes
+         on the span based on the function's execution and response, and ensures
+         errors are handled and logged appropriately.
+         """
+
+         with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+             response = wrapped(*args, **kwargs)
+
+             try:
+                 span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+                                    gen_ai_endpoint)
+                 span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+                                    SemanticConvetion.GEN_AI_SYSTEM_CONTROLFLOW)
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                                    environment)
+                 span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                                    application_name)
+                 span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+                                    SemanticConvetion.GEN_AI_TYPE_AGENT)
+
+                 if gen_ai_endpoint == "controlflow.create_agent":
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ROLE,
+                                        instance.name)
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_INSTRUCTIONS,
+                                        kwargs.get("instructions", ""))
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TOOLS,
+                                        str(kwargs.get("tools", "")))
+
+                     try:
+                         span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
+                                            instance.model.model_name)
+                     except:
+                         span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
+                                            kwargs.get("model", "openai/gpt-4o-mini"))
+
+                 elif gen_ai_endpoint == "controlflow.create_task":
+                     if kwargs.get("objective","") == "":
+                         span.set_attribute(SemanticConvetion.GEN_AI_AGENT_GOAL,
+                                            str(args[0]))
+                     else:
+                         span.set_attribute(SemanticConvetion.GEN_AI_AGENT_GOAL,
+                                            kwargs.get("objective",""))
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_INSTRUCTIONS,
+                                        kwargs.get("instructions", ""))
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_CONTEXT,
+                                        str(kwargs.get("context", "")))
+
+                 span.set_status(Status(StatusCode.OK))
+
+                 # Return original response
+                 return response
+
+             except Exception as e:
+                 handle_exception(span, e)
+                 logger.error("Error in trace creation: %s", e)
+
+                 # Return original response
+                 return response
+
+     return wrapper
openlit/instrumentation/openai/openai.py CHANGED
@@ -103,12 +103,15 @@ def chat_completions(gen_ai_endpoint, version, environment, application_name,
  content = message["content"]
 
  if isinstance(content, list):
-     content_str = ", ".join(
-         # pylint: disable=line-too-long
-         f'{item["type"]}: {item["text"] if "text" in item else item["image_url"]}'
-         if "type" in item else f'text: {item["text"]}'
-         for item in content
-     )
+     content_str_list = []
+     for item in content:
+         if item["type"] == "text":
+             content_str_list.append(f'text: {item["text"]}')
+         elif (item["type"] == "image_url" and
+               not item["image_url"]["url"].startswith("data:")):
+             # pylint: disable=line-too-long
+             content_str_list.append(f'image_url: {item["image_url"]["url"]}')
+     content_str = ", ".join(content_str_list)
      formatted_messages.append(f"{role}: {content_str}")
  else:
      formatted_messages.append(f"{role}: {content}")
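Worked example of the new chat content formatting above: text parts and remote image URLs are kept in the traced content, while inline base64 "data:" image payloads are skipped. (Illustrative values; the loop mirrors the added code.)

content = [
    {"type": "text", "text": "What is in this picture?"},
    {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
    {"type": "image_url", "image_url": {"url": "data:image/png;base64,iVBORw0..."}},  # skipped
]
content_str_list = []
for item in content:
    if item["type"] == "text":
        content_str_list.append(f'text: {item["text"]}')
    elif (item["type"] == "image_url" and
          not item["image_url"]["url"].startswith("data:")):
        content_str_list.append(f'image_url: {item["image_url"]["url"]}')
print(", ".join(content_str_list))
# -> text: What is in this picture?, image_url: https://example.com/cat.png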
openlit/semcov/__init__.py CHANGED
@@ -128,6 +128,7 @@ class SemanticConvetion:
      GEN_AI_SYSTEM_PHIDATA = "phidata"
      GEN_AI_SYSTEM_JULEP = "julep"
      GEN_AI_SYSTEM_AI21 = "ai21"
+     GEN_AI_SYSTEM_CONTROLFLOW = "controlflow"
 
      # Vector DB
      DB_OPERATION_API_ENDPOINT = "db.operation.api_endpoint"
openlit-1.32.4.dist-info/METADATA → openlit-1.32.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: openlit
- Version: 1.32.4
+ Version: 1.32.6
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -79,7 +79,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
  | [✅ AI21](https://docs.openlit.io/latest/integrations/ai21) | | [✅ mem0](https://docs.openlit.io/latest/integrations/mem0) | |
  | [✅ Vertex AI](https://docs.openlit.io/latest/integrations/vertexai) | | [✅ MultiOn](https://docs.openlit.io/latest/integrations/multion) | |
  | [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
- | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | | |
+ | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
  | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | | |
  | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
  | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
openlit-1.32.4.dist-info/RECORD → openlit-1.32.6.dist-info/RECORD CHANGED
@@ -1,5 +1,5 @@
  openlit/__helpers.py,sha256=2OkGKOdsd9Hc011WxR70OqDlO6c4mZcu6McGuW1uAdA,6316
- openlit/__init__.py,sha256=Hbttvb22kSfAnS1pMRPbePxvV-1QhsT2cgdPo_Kn4WY,21122
+ openlit/__init__.py,sha256=FUNyEH5AnzoGuxiKOyngHcyoMjuRlGj76fmgQEEV9k0,21286
  openlit/evals/__init__.py,sha256=nJe99nuLo1b5rf7pt9U9BCdSDedzbVi2Fj96cgl7msM,380
  openlit/evals/all.py,sha256=oWrue3PotE-rB5WePG3MRYSA-ro6WivkclSHjYlAqGs,7154
  openlit/evals/bias_detection.py,sha256=mCdsfK7x1vX7S3psC3g641IMlZ-7df3h-V6eiICj5N8,8154
@@ -27,11 +27,13 @@ openlit/instrumentation/azure_ai_inference/__init__.py,sha256=Xl_4hjQeXcA-NgkqwT
  openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py,sha256=T3SLSJxwrjOaGGkedB6DT92SCHLWbaJu5YAzZzAeBsk,22748
  openlit/instrumentation/azure_ai_inference/azure_ai_inference.py,sha256=IzwDZ99h7HpOI-NnEkYqOIh2sAm-2aHi4BcTMoXNx1c,22694
  openlit/instrumentation/bedrock/__init__.py,sha256=DLLYio4S4gUzRElqNRT8WMKzM79HZwOBVjXfJI4BfaA,1545
- openlit/instrumentation/bedrock/bedrock.py,sha256=HqRZeiAFeNdlhlnt4DSLda8qkMP3nPKq_zhdxDssXmY,9498
+ openlit/instrumentation/bedrock/bedrock.py,sha256=F-n2WMlppxb7wM7UWEu1wqRZEpFzcGXWVmN9v9mAfeE,9288
  openlit/instrumentation/chroma/__init__.py,sha256=61lFpHlUEQUobsUJZHXdvOViKwsOH8AOvSfc4VgCmiM,3253
  openlit/instrumentation/chroma/chroma.py,sha256=E80j_41UeZi8RzTsHbpvi1izOA_n-0-3_VdrA68AJPA,10531
  openlit/instrumentation/cohere/__init__.py,sha256=PC5T1qIg9pwLNocBP_WjG5B_6p_z019s8quk_fNLAMs,1920
  openlit/instrumentation/cohere/cohere.py,sha256=62-P2K39v6pIJme6vTVViLJ9PP8q_UWkTv2l3Wa2gHA,21217
+ openlit/instrumentation/controlflow/__init__.py,sha256=iKZ08IANfoN_n4o1TZJIK_C_t6RZQ6AS1H7kMfyBbYA,2118
+ openlit/instrumentation/controlflow/controlflow.py,sha256=DP4KWBzcVg-zeCb4C6r-hK9_LdDzWNPBsOjbK-5WRqY,5528
  openlit/instrumentation/crewai/__init__.py,sha256=cETkkwnKYEMAKlMrHbZ9-RvcRUPYaSNqNIhy2-vCDK8,1794
  openlit/instrumentation/crewai/crewai.py,sha256=mpEJql6aDs3wwBjLz686anOHkIA5gWfhFCCHAgJRY0w,7049
  openlit/instrumentation/dynamiq/__init__.py,sha256=2uIHHxFWca0g2YLO2RBfi2Al6uWUYvVZBfDiPOHCdpQ,2331
@@ -79,7 +81,7 @@ openlit/instrumentation/openai/__init__.py,sha256=AZ2cPr3TMKkgGdMl_yXMeSi7bWhtmM
  openlit/instrumentation/openai/async_azure_openai.py,sha256=XbST1UE_zXzNL6RX2XwCsK_a6IhG9PHVTMKBjGrUcB0,48961
  openlit/instrumentation/openai/async_openai.py,sha256=XFsfN81mbmdgRON2dwmt8pypqoTnlrNWer1eit7wZbQ,50176
  openlit/instrumentation/openai/azure_openai.py,sha256=dZUc5MtCwg_sZJWiruG6exYGhPAm-339sqs3sKZNRPU,48761
- openlit/instrumentation/openai/openai.py,sha256=qP3ahUyMGjmq2ZB8apqnERal7kz49uW5DaxDU9FBQdk,50005
+ openlit/instrumentation/openai/openai.py,sha256=2udwA-MxLJfpKERTipCEIcRyLerPJT9IaFc_OmZPZ5U,50262
  openlit/instrumentation/phidata/__init__.py,sha256=rfPCXYOIsJbxChee2p269UzkJ1Z-pvQbii7Fgrw1v2g,1527
  openlit/instrumentation/phidata/phidata.py,sha256=9Aza2bLgeq688Ahyy7ekbxpSh4RTD7FFKtLmv4TNbrw,4667
  openlit/instrumentation/pinecone/__init__.py,sha256=Mv9bElqNs07_JQkYyNnO0wOM3hdbprmw7sttdMeKC7g,2526
@@ -101,8 +103,8 @@ openlit/instrumentation/vllm/__init__.py,sha256=OVWalQ1dXvip1DUsjUGaHX4J-2FrSp-T
  openlit/instrumentation/vllm/vllm.py,sha256=lDzM7F5pgxvh8nKL0dcKB4TD0Mc9wXOWeXOsOGN7Wd8,6527
  openlit/otel/metrics.py,sha256=y7SQDTyfLakMrz0V4DThN-WAeap7YZzyndeYGSP6nVg,4516
  openlit/otel/tracing.py,sha256=fG3vl-flSZ30whCi7rrG25PlkIhhr8PhnfJYCkZzCD0,3895
- openlit/semcov/__init__.py,sha256=AadYgIBy1SxUOoe3lX9TeOamFJzBKLde91ONjnwg2LA,10470
- openlit-1.32.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- openlit-1.32.4.dist-info/METADATA,sha256=KK34ruzPOzuf9YQ1C-pReci2wnAcbmPugwIzrWcCbDY,22412
- openlit-1.32.4.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- openlit-1.32.4.dist-info/RECORD,,
+ openlit/semcov/__init__.py,sha256=oW2jh7dSKrOFsbO7tdXSA5yqUq6iN5A9QsgjfIeApGA,10516
+ openlit-1.32.6.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ openlit-1.32.6.dist-info/METADATA,sha256=xR9rBVB9PwR5FpQQEuK4zvFKQkSbwGIt1V6N4coxHuY,22441
+ openlit-1.32.6.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ openlit-1.32.6.dist-info/RECORD,,