openlit 1.32.3__py3-none-any.whl → 1.32.6__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1,113 @@
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument, bare-except
+ """
+ Module for monitoring ControlFlow.
+ """
+
+ import logging
+ from opentelemetry.trace import SpanKind, Status, StatusCode
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+ from openlit.__helpers import handle_exception
+ from openlit.semcov import SemanticConvetion
+
+ # Initialize logger for logging potential issues and operations
+ logger = logging.getLogger(__name__)
+
+ def wrap_controlflow(gen_ai_endpoint, version, environment, application_name,
+                      tracer, pricing_info, trace_content, metrics, disable_metrics):
+     """
+     Creates a wrapper around a function call to trace and log its execution metrics.
+
+     This function wraps any given function to measure its execution time,
+     log its operation, and trace its execution using OpenTelemetry.
+
+     Parameters:
+     - gen_ai_endpoint (str): A descriptor or name for the endpoint being traced.
+     - version (str): The version of the ControlFlow application.
+     - environment (str): The deployment environment (e.g., 'production', 'development').
+     - application_name (str): Name of the ControlFlow application.
+     - tracer (opentelemetry.trace.Tracer): The tracer object used for OpenTelemetry tracing.
+     - pricing_info (dict): Information about the pricing for internal metrics (currently not used).
+     - trace_content (bool): Flag indicating whether to trace the content of the response.
+
+     Returns:
+     - function: A higher-order function that takes a function 'wrapped' and returns
+       a new function that wraps 'wrapped' with additional tracing and logging.
+     """
+
+     def wrapper(wrapped, instance, args, kwargs):
+         """
+         An inner wrapper function that executes the wrapped function, measures execution
+         time, and records trace data using OpenTelemetry.
+
+         Parameters:
+         - wrapped (Callable): The original function that this wrapper will execute.
+         - instance (object): The instance to which the wrapped function belongs. This
+                              is used for instance methods. For static and class methods,
+                              this may be None.
+         - args (tuple): Positional arguments passed to the wrapped function.
+         - kwargs (dict): Keyword arguments passed to the wrapped function.
+
+         Returns:
+         - The result of the wrapped function call.
+
+         The wrapper initiates a span with the provided tracer, sets various attributes
+         on the span based on the function's execution and response, and ensures
+         errors are handled and logged appropriately.
+         """
+
+         with tracer.start_as_current_span(gen_ai_endpoint, kind=SpanKind.CLIENT) as span:
+             response = wrapped(*args, **kwargs)
+
+             try:
+                 span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+                                    gen_ai_endpoint)
+                 span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+                                    SemanticConvetion.GEN_AI_SYSTEM_CONTROLFLOW)
+                 span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                                    environment)
+                 span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                                    application_name)
+                 span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+                                    SemanticConvetion.GEN_AI_TYPE_AGENT)
+
+                 if gen_ai_endpoint == "controlflow.create_agent":
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ROLE,
+                                        instance.name)
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_INSTRUCTIONS,
+                                        kwargs.get("instructions", ""))
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TOOLS,
+                                        str(kwargs.get("tools", "")))
+
+                     try:
+                         span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
+                                            instance.model.model_name)
+                     except:
+                         span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
+                                            kwargs.get("model", "openai/gpt-4o-mini"))
+
+                 elif gen_ai_endpoint == "controlflow.create_task":
+                     if kwargs.get("objective", "") == "":
+                         span.set_attribute(SemanticConvetion.GEN_AI_AGENT_GOAL,
+                                            str(args[0]))
+                     else:
+                         span.set_attribute(SemanticConvetion.GEN_AI_AGENT_GOAL,
+                                            kwargs.get("objective", ""))
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_INSTRUCTIONS,
+                                        kwargs.get("instructions", ""))
+                     span.set_attribute(SemanticConvetion.GEN_AI_AGENT_CONTEXT,
+                                        str(kwargs.get("context", "")))
+
+                 span.set_status(Status(StatusCode.OK))
+
+                 # Return original response
+                 return response
+
+             except Exception as e:
+                 handle_exception(span, e)
+                 logger.error("Error in trace creation: %s", e)
+
+                 # Return original response
+                 return response
+
+     return wrapper
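The wrapper above is keyed on the `controlflow.create_agent` and `controlflow.create_task` endpoint names, so once OpenLIT is initialized, simply constructing ControlFlow agents and tasks should produce the spans it describes. A minimal, hypothetical usage sketch (the ControlFlow constructor arguments and the OTLP endpoint below are illustrative assumptions, not part of this diff):

```python
# Hypothetical sketch: exercising the new ControlFlow instrumentation.
import openlit
import controlflow as cf  # assumes the controlflow package is installed

# openlit.init() wires up tracing; endpoint and app name are illustrative values.
openlit.init(application_name="demo-app", environment="development",
             otlp_endpoint="http://127.0.0.1:4318")

# Creating an agent should hit the "controlflow.create_agent" branch above
# (GEN_AI_AGENT_ROLE, GEN_AI_AGENT_INSTRUCTIONS, GEN_AI_AGENT_TOOLS, request model).
agent = cf.Agent(name="researcher", instructions="Answer questions concisely.")

# Creating a task should hit the "controlflow.create_task" branch above
# (GEN_AI_AGENT_GOAL, GEN_AI_AGENT_INSTRUCTIONS, GEN_AI_AGENT_CONTEXT).
task = cf.Task(objective="Summarize today's findings", agents=[agent])
```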
@@ -103,12 +103,15 @@ def chat_completions(gen_ai_endpoint, version, environment, application_name,
              content = message["content"]

              if isinstance(content, list):
-                 content_str = ", ".join(
-                     # pylint: disable=line-too-long
-                     f'{item["type"]}: {item["text"] if "text" in item else item["image_url"]}'
-                     if "type" in item else f'text: {item["text"]}'
-                     for item in content
-                 )
+                 content_str_list = []
+                 for item in content:
+                     if item["type"] == "text":
+                         content_str_list.append(f'text: {item["text"]}')
+                     elif (item["type"] == "image_url" and
+                           not item["image_url"]["url"].startswith("data:")):
+                         # pylint: disable=line-too-long
+                         content_str_list.append(f'image_url: {item["image_url"]["url"]}')
+                 content_str = ", ".join(content_str_list)
                  formatted_messages.append(f"{role}: {content_str}")
              else:
                  formatted_messages.append(f"{role}: {content}")
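The rewritten branch above changes how multimodal message content is flattened for tracing: `data:` (base64) image URLs are now skipped rather than embedded in the span. A standalone sketch of the same logic, with a sample message invented for illustration:

```python
# Standalone sketch of the new content-flattening logic; sample input is illustrative.
content = [
    {"type": "text", "text": "What is in this picture?"},
    {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
    {"type": "image_url", "image_url": {"url": "data:image/png;base64,iVBORw0..."}},
]

content_str_list = []
for item in content:
    if item["type"] == "text":
        content_str_list.append(f'text: {item["text"]}')
    elif (item["type"] == "image_url" and
          not item["image_url"]["url"].startswith("data:")):
        # base64 data URIs are excluded so traced prompts stay small
        content_str_list.append(f'image_url: {item["image_url"]["url"]}')

content_str = ", ".join(content_str_list)
print(content_str)
# -> text: What is in this picture?, image_url: https://example.com/cat.png
```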
@@ -76,6 +76,14 @@ class SemanticConvetion:
      GEN_AI_CONTENT_COMPLETION = "gen_ai.completion"
      GEN_AI_CONTENT_REVISED_PROMPT = "gen_ai.content.revised_prompt"

+     # GenAI Rag
+     GEN_AI_RAG_MAX_SEGMENTS = "gen_ai.rag.max_segments"
+     GEN_AI_RAG_STRATEGY = "gen_ai.rag.strategy"
+     GEN_AI_RAG_SIMILARITY_THRESHOLD = "gen_ai.rag.similarity_threshold"
+     GEN_AI_RAG_MAX_NEIGHBORS = "gen_ai.rag.max_neighbors"
+     GEN_AI_RAG_DOCUMENTS_PATH = "gen_ai.rag.documents_path"
+     GEN_AI_RAG_FILE_IDS = "gen_ai.rag.file_ids"
+
      # GenAI Evaluation Metrics
      GEN_AI_EVAL_CONTEXT_RELEVANCY = "gen_ai.eval.context_relevancy"
      GEN_AI_EVAL_ANSWER_RELEVANCY = "gen_ai.eval.answer_relevancy"
@@ -119,6 +127,8 @@ class SemanticConvetion:
      GEN_AI_SYSTEM_DYNAMIQ = "dynamiq"
      GEN_AI_SYSTEM_PHIDATA = "phidata"
      GEN_AI_SYSTEM_JULEP = "julep"
+     GEN_AI_SYSTEM_AI21 = "ai21"
+     GEN_AI_SYSTEM_CONTROLFLOW = "controlflow"

      # Vector DB
      DB_OPERATION_API_ENDPOINT = "db.operation.api_endpoint"
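The new `gen_ai.rag.*` constants are plain span attributes, so they can be recorded through the standard OpenTelemetry API. A minimal sketch, assuming a configured tracer; the span name and values are illustrative:

```python
# Minimal sketch: attaching the new RAG attributes to a span (values are illustrative).
from opentelemetry import trace
from openlit.semcov import SemanticConvetion

tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span("rag.example") as span:
    span.set_attribute(SemanticConvetion.GEN_AI_RAG_MAX_SEGMENTS, 5)
    span.set_attribute(SemanticConvetion.GEN_AI_RAG_STRATEGY, "segments")
    span.set_attribute(SemanticConvetion.GEN_AI_RAG_SIMILARITY_THRESHOLD, 0.7)
    span.set_attribute(SemanticConvetion.GEN_AI_RAG_MAX_NEIGHBORS, 3)
    span.set_attribute(SemanticConvetion.GEN_AI_RAG_FILE_IDS, str(["file-123"]))
```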
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: openlit
- Version: 1.32.3
+ Version: 1.32.6
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -76,19 +76,18 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
  | [✅ GitHub AI Models](https://docs.openlit.io/latest/integrations/github-models) | | [✅ AG2](https://docs.openlit.io/latest/integrations/ag2) | |
  | [✅ HuggingFace Transformers](https://docs.openlit.io/latest/integrations/huggingface) | | [✅ Dynamiq](https://docs.openlit.io/latest/integrations/dynamiq) | |
  | [✅ Amazon Bedrock](https://docs.openlit.io/latest/integrations/bedrock) | | [✅ Phidata](https://docs.openlit.io/latest/integrations/phidata) | |
- | | | [✅ mem0](https://docs.openlit.io/latest/integrations/mem0) | |
- | [✅ Vertex AI](https://docs.openlit.io/latest/integrations/vertexai) | | [✅ MultiOn](https://docs.openlit.io/latest/integrations/multion) | |
- | [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
- | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | | |
- | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | | |
- | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
- | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
- | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
- | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
- | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
- | [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
- | [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |
-
+ | [✅ AI21](https://docs.openlit.io/latest/integrations/ai21) | | [✅ mem0](https://docs.openlit.io/latest/integrations/mem0) | |
+ | [✅ Vertex AI](https://docs.openlit.io/latest/integrations/vertexai) | | [✅ MultiOn](https://docs.openlit.io/latest/integrations/multion) | |
+ | [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
+ | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
+ | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | | |
+ | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
+ | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
+ | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
+ | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
+ | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
+ | [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
+ | [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |

  ## Supported Destinations
  - [✅ OpenTelemetry Collector](https://docs.openlit.io/latest/connections/otelcol)
@@ -1,5 +1,5 @@
  openlit/__helpers.py,sha256=2OkGKOdsd9Hc011WxR70OqDlO6c4mZcu6McGuW1uAdA,6316
- openlit/__init__.py,sha256=aFhA4yNRGE17CB1heL4GEWTZcPQZpJ1-7_yZel8cm6A,21000
+ openlit/__init__.py,sha256=FUNyEH5AnzoGuxiKOyngHcyoMjuRlGj76fmgQEEV9k0,21286
  openlit/evals/__init__.py,sha256=nJe99nuLo1b5rf7pt9U9BCdSDedzbVi2Fj96cgl7msM,380
  openlit/evals/all.py,sha256=oWrue3PotE-rB5WePG3MRYSA-ro6WivkclSHjYlAqGs,7154
  openlit/evals/bias_detection.py,sha256=mCdsfK7x1vX7S3psC3g641IMlZ-7df3h-V6eiICj5N8,8154
@@ -14,6 +14,9 @@ openlit/guard/sensitive_topic.py,sha256=RgVw_laFERv0nNdzBsAd2_3yLomMOK-gVq-P7oj1
  openlit/guard/utils.py,sha256=x0-_hAtNa_ogYR2GfnwiBF1rlqaXtaJ-rJeGguTDe-Q,7663
  openlit/instrumentation/ag2/__init__.py,sha256=Nf9cDoXB16NYgZisvVQduFYJ5fpU90CNlMrIF4pSH-Y,1827
  openlit/instrumentation/ag2/ag2.py,sha256=_ncg8RqUH-wXMYfaOYx2bcQOrOrDMVVm0EZAEkWdBn0,4444
+ openlit/instrumentation/ai21/__init__.py,sha256=O9xxo_-g3H8TCam2OzTamp6IQnYOCcBD_5B0OOSZpYs,2653
+ openlit/instrumentation/ai21/ai21.py,sha256=MfDrqvzigoWsrF_k6ri6c5IAMIgpdgrumckqxJmBXws,31687
+ openlit/instrumentation/ai21/async_ai21.py,sha256=OVDKt9Ymlp0OTCNVEirvRwqMSL5VZHppnwJTOkH8es8,31790
  openlit/instrumentation/anthropic/__init__.py,sha256=oaU53BOPyfUKbEzYvLr1DPymDluurSnwo4Hernf2XdU,1955
  openlit/instrumentation/anthropic/anthropic.py,sha256=y7CEGhKOGHWt8G_5Phr4qPJTfPGRJIAr9Yk6nM3CcvM,16775
  openlit/instrumentation/anthropic/async_anthropic.py,sha256=Zz1KRKIG9wGn0quOoLvjORC-49IvHQpJ6GBdB-4PfCQ,16816
@@ -24,11 +27,13 @@ openlit/instrumentation/azure_ai_inference/__init__.py,sha256=Xl_4hjQeXcA-NgkqwT
  openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py,sha256=T3SLSJxwrjOaGGkedB6DT92SCHLWbaJu5YAzZzAeBsk,22748
  openlit/instrumentation/azure_ai_inference/azure_ai_inference.py,sha256=IzwDZ99h7HpOI-NnEkYqOIh2sAm-2aHi4BcTMoXNx1c,22694
  openlit/instrumentation/bedrock/__init__.py,sha256=DLLYio4S4gUzRElqNRT8WMKzM79HZwOBVjXfJI4BfaA,1545
- openlit/instrumentation/bedrock/bedrock.py,sha256=HqRZeiAFeNdlhlnt4DSLda8qkMP3nPKq_zhdxDssXmY,9498
+ openlit/instrumentation/bedrock/bedrock.py,sha256=F-n2WMlppxb7wM7UWEu1wqRZEpFzcGXWVmN9v9mAfeE,9288
  openlit/instrumentation/chroma/__init__.py,sha256=61lFpHlUEQUobsUJZHXdvOViKwsOH8AOvSfc4VgCmiM,3253
  openlit/instrumentation/chroma/chroma.py,sha256=E80j_41UeZi8RzTsHbpvi1izOA_n-0-3_VdrA68AJPA,10531
  openlit/instrumentation/cohere/__init__.py,sha256=PC5T1qIg9pwLNocBP_WjG5B_6p_z019s8quk_fNLAMs,1920
  openlit/instrumentation/cohere/cohere.py,sha256=62-P2K39v6pIJme6vTVViLJ9PP8q_UWkTv2l3Wa2gHA,21217
+ openlit/instrumentation/controlflow/__init__.py,sha256=iKZ08IANfoN_n4o1TZJIK_C_t6RZQ6AS1H7kMfyBbYA,2118
+ openlit/instrumentation/controlflow/controlflow.py,sha256=DP4KWBzcVg-zeCb4C6r-hK9_LdDzWNPBsOjbK-5WRqY,5528
  openlit/instrumentation/crewai/__init__.py,sha256=cETkkwnKYEMAKlMrHbZ9-RvcRUPYaSNqNIhy2-vCDK8,1794
  openlit/instrumentation/crewai/crewai.py,sha256=mpEJql6aDs3wwBjLz686anOHkIA5gWfhFCCHAgJRY0w,7049
  openlit/instrumentation/dynamiq/__init__.py,sha256=2uIHHxFWca0g2YLO2RBfi2Al6uWUYvVZBfDiPOHCdpQ,2331
@@ -76,7 +81,7 @@ openlit/instrumentation/openai/__init__.py,sha256=AZ2cPr3TMKkgGdMl_yXMeSi7bWhtmM
  openlit/instrumentation/openai/async_azure_openai.py,sha256=XbST1UE_zXzNL6RX2XwCsK_a6IhG9PHVTMKBjGrUcB0,48961
  openlit/instrumentation/openai/async_openai.py,sha256=XFsfN81mbmdgRON2dwmt8pypqoTnlrNWer1eit7wZbQ,50176
  openlit/instrumentation/openai/azure_openai.py,sha256=dZUc5MtCwg_sZJWiruG6exYGhPAm-339sqs3sKZNRPU,48761
- openlit/instrumentation/openai/openai.py,sha256=qP3ahUyMGjmq2ZB8apqnERal7kz49uW5DaxDU9FBQdk,50005
+ openlit/instrumentation/openai/openai.py,sha256=2udwA-MxLJfpKERTipCEIcRyLerPJT9IaFc_OmZPZ5U,50262
  openlit/instrumentation/phidata/__init__.py,sha256=rfPCXYOIsJbxChee2p269UzkJ1Z-pvQbii7Fgrw1v2g,1527
  openlit/instrumentation/phidata/phidata.py,sha256=9Aza2bLgeq688Ahyy7ekbxpSh4RTD7FFKtLmv4TNbrw,4667
  openlit/instrumentation/pinecone/__init__.py,sha256=Mv9bElqNs07_JQkYyNnO0wOM3hdbprmw7sttdMeKC7g,2526
@@ -98,8 +103,8 @@ openlit/instrumentation/vllm/__init__.py,sha256=OVWalQ1dXvip1DUsjUGaHX4J-2FrSp-T
  openlit/instrumentation/vllm/vllm.py,sha256=lDzM7F5pgxvh8nKL0dcKB4TD0Mc9wXOWeXOsOGN7Wd8,6527
  openlit/otel/metrics.py,sha256=y7SQDTyfLakMrz0V4DThN-WAeap7YZzyndeYGSP6nVg,4516
  openlit/otel/tracing.py,sha256=fG3vl-flSZ30whCi7rrG25PlkIhhr8PhnfJYCkZzCD0,3895
- openlit/semcov/__init__.py,sha256=knUQxtYMGrW3fwfx8uXGn0E17WLxVPTkWVNQWvY4w8g,10079
- openlit-1.32.3.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- openlit-1.32.3.dist-info/METADATA,sha256=qrskMzUSxlj4SSzStMeKinTmB9dArpURwvS-s5NGJAY,22430
- openlit-1.32.3.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
- openlit-1.32.3.dist-info/RECORD,,
+ openlit/semcov/__init__.py,sha256=oW2jh7dSKrOFsbO7tdXSA5yqUq6iN5A9QsgjfIeApGA,10516
+ openlit-1.32.6.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ openlit-1.32.6.dist-info/METADATA,sha256=xR9rBVB9PwR5FpQQEuK4zvFKQkSbwGIt1V6N4coxHuY,22441
+ openlit-1.32.6.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ openlit-1.32.6.dist-info/RECORD,,