openlit 1.33.9__py3-none-any.whl → 1.33.11__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registries. It is provided for informational purposes only.
Files changed (113)
  1. openlit/__helpers.py +78 -0
  2. openlit/__init__.py +41 -13
  3. openlit/instrumentation/ag2/__init__.py +9 -10
  4. openlit/instrumentation/ag2/ag2.py +134 -69
  5. openlit/instrumentation/ai21/__init__.py +6 -5
  6. openlit/instrumentation/ai21/ai21.py +71 -534
  7. openlit/instrumentation/ai21/async_ai21.py +71 -534
  8. openlit/instrumentation/ai21/utils.py +407 -0
  9. openlit/instrumentation/anthropic/__init__.py +3 -3
  10. openlit/instrumentation/anthropic/anthropic.py +5 -5
  11. openlit/instrumentation/anthropic/async_anthropic.py +5 -5
  12. openlit/instrumentation/assemblyai/__init__.py +2 -2
  13. openlit/instrumentation/assemblyai/assemblyai.py +3 -3
  14. openlit/instrumentation/astra/__init__.py +25 -25
  15. openlit/instrumentation/astra/astra.py +7 -7
  16. openlit/instrumentation/astra/async_astra.py +7 -7
  17. openlit/instrumentation/azure_ai_inference/__init__.py +5 -5
  18. openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +11 -11
  19. openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +11 -11
  20. openlit/instrumentation/bedrock/__init__.py +2 -2
  21. openlit/instrumentation/bedrock/bedrock.py +3 -3
  22. openlit/instrumentation/chroma/__init__.py +9 -9
  23. openlit/instrumentation/chroma/chroma.py +7 -7
  24. openlit/instrumentation/cohere/__init__.py +7 -7
  25. openlit/instrumentation/cohere/async_cohere.py +10 -10
  26. openlit/instrumentation/cohere/cohere.py +11 -11
  27. openlit/instrumentation/controlflow/__init__.py +4 -4
  28. openlit/instrumentation/controlflow/controlflow.py +5 -5
  29. openlit/instrumentation/crawl4ai/__init__.py +3 -3
  30. openlit/instrumentation/crawl4ai/async_crawl4ai.py +5 -5
  31. openlit/instrumentation/crawl4ai/crawl4ai.py +5 -5
  32. openlit/instrumentation/crewai/__init__.py +3 -3
  33. openlit/instrumentation/crewai/crewai.py +6 -4
  34. openlit/instrumentation/dynamiq/__init__.py +5 -5
  35. openlit/instrumentation/dynamiq/dynamiq.py +5 -5
  36. openlit/instrumentation/elevenlabs/__init__.py +5 -5
  37. openlit/instrumentation/elevenlabs/async_elevenlabs.py +4 -5
  38. openlit/instrumentation/elevenlabs/elevenlabs.py +4 -5
  39. openlit/instrumentation/embedchain/__init__.py +2 -2
  40. openlit/instrumentation/embedchain/embedchain.py +9 -9
  41. openlit/instrumentation/firecrawl/__init__.py +3 -3
  42. openlit/instrumentation/firecrawl/firecrawl.py +5 -5
  43. openlit/instrumentation/google_ai_studio/__init__.py +3 -3
  44. openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +3 -3
  45. openlit/instrumentation/google_ai_studio/google_ai_studio.py +3 -3
  46. openlit/instrumentation/gpt4all/__init__.py +5 -5
  47. openlit/instrumentation/gpt4all/gpt4all.py +350 -225
  48. openlit/instrumentation/gpu/__init__.py +5 -5
  49. openlit/instrumentation/groq/__init__.py +5 -5
  50. openlit/instrumentation/groq/async_groq.py +359 -243
  51. openlit/instrumentation/groq/groq.py +359 -243
  52. openlit/instrumentation/haystack/__init__.py +2 -2
  53. openlit/instrumentation/haystack/haystack.py +5 -5
  54. openlit/instrumentation/julep/__init__.py +7 -7
  55. openlit/instrumentation/julep/async_julep.py +6 -6
  56. openlit/instrumentation/julep/julep.py +6 -6
  57. openlit/instrumentation/langchain/__init__.py +15 -9
  58. openlit/instrumentation/langchain/async_langchain.py +388 -0
  59. openlit/instrumentation/langchain/langchain.py +110 -497
  60. openlit/instrumentation/letta/__init__.py +7 -7
  61. openlit/instrumentation/letta/letta.py +10 -8
  62. openlit/instrumentation/litellm/__init__.py +9 -10
  63. openlit/instrumentation/litellm/async_litellm.py +321 -250
  64. openlit/instrumentation/litellm/litellm.py +319 -248
  65. openlit/instrumentation/llamaindex/__init__.py +2 -2
  66. openlit/instrumentation/llamaindex/llamaindex.py +5 -5
  67. openlit/instrumentation/mem0/__init__.py +2 -2
  68. openlit/instrumentation/mem0/mem0.py +5 -5
  69. openlit/instrumentation/milvus/__init__.py +2 -2
  70. openlit/instrumentation/milvus/milvus.py +7 -7
  71. openlit/instrumentation/mistral/__init__.py +13 -13
  72. openlit/instrumentation/mistral/async_mistral.py +426 -253
  73. openlit/instrumentation/mistral/mistral.py +424 -250
  74. openlit/instrumentation/multion/__init__.py +7 -7
  75. openlit/instrumentation/multion/async_multion.py +9 -7
  76. openlit/instrumentation/multion/multion.py +9 -7
  77. openlit/instrumentation/ollama/__init__.py +19 -39
  78. openlit/instrumentation/ollama/async_ollama.py +137 -563
  79. openlit/instrumentation/ollama/ollama.py +136 -563
  80. openlit/instrumentation/ollama/utils.py +333 -0
  81. openlit/instrumentation/openai/__init__.py +11 -11
  82. openlit/instrumentation/openai/async_openai.py +25 -27
  83. openlit/instrumentation/openai/openai.py +25 -27
  84. openlit/instrumentation/phidata/__init__.py +2 -2
  85. openlit/instrumentation/phidata/phidata.py +6 -4
  86. openlit/instrumentation/pinecone/__init__.py +6 -6
  87. openlit/instrumentation/pinecone/pinecone.py +7 -7
  88. openlit/instrumentation/premai/__init__.py +5 -5
  89. openlit/instrumentation/premai/premai.py +268 -219
  90. openlit/instrumentation/qdrant/__init__.py +2 -2
  91. openlit/instrumentation/qdrant/async_qdrant.py +7 -7
  92. openlit/instrumentation/qdrant/qdrant.py +7 -7
  93. openlit/instrumentation/reka/__init__.py +5 -5
  94. openlit/instrumentation/reka/async_reka.py +93 -55
  95. openlit/instrumentation/reka/reka.py +93 -55
  96. openlit/instrumentation/together/__init__.py +9 -9
  97. openlit/instrumentation/together/async_together.py +284 -242
  98. openlit/instrumentation/together/together.py +284 -242
  99. openlit/instrumentation/transformers/__init__.py +3 -3
  100. openlit/instrumentation/transformers/transformers.py +79 -48
  101. openlit/instrumentation/vertexai/__init__.py +19 -69
  102. openlit/instrumentation/vertexai/async_vertexai.py +333 -990
  103. openlit/instrumentation/vertexai/vertexai.py +333 -990
  104. openlit/instrumentation/vllm/__init__.py +3 -3
  105. openlit/instrumentation/vllm/vllm.py +65 -35
  106. openlit/otel/events.py +85 -0
  107. openlit/otel/tracing.py +3 -13
  108. openlit/semcov/__init__.py +16 -4
  109. {openlit-1.33.9.dist-info → openlit-1.33.11.dist-info}/METADATA +2 -2
  110. openlit-1.33.11.dist-info/RECORD +125 -0
  111. openlit-1.33.9.dist-info/RECORD +0 -121
  112. {openlit-1.33.9.dist-info → openlit-1.33.11.dist-info}/LICENSE +0 -0
  113. {openlit-1.33.9.dist-info → openlit-1.33.11.dist-info}/WHEEL +0 -0
openlit/instrumentation/llamaindex/__init__.py
@@ -35,7 +35,7 @@ class LlamaIndexInstrumentor(BaseInstrumentor):
         environment = kwargs.get("environment")
         tracer = kwargs.get("tracer")
         pricing_info = kwargs.get("pricing_info")
-        trace_content = kwargs.get("trace_content")
+        capture_message_content = kwargs.get("capture_message_content")
         version = importlib.metadata.version("llama-index")
 
         for wrapped_method in WRAPPED_METHODS:
@@ -47,7 +47,7 @@ class LlamaIndexInstrumentor(BaseInstrumentor):
                 wrap_package,
                 wrap_object,
                 wrapper(gen_ai_endpoint, version, environment, application_name,
-                        tracer, pricing_info, trace_content),
+                        tracer, pricing_info, capture_message_content),
             )
 
     @staticmethod
openlit/instrumentation/llamaindex/llamaindex.py
@@ -5,7 +5,7 @@ Module for monitoring LlamaIndex applications.
 
 import logging
 from opentelemetry.trace import SpanKind, Status, StatusCode
-from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+from opentelemetry.sdk.resources import SERVICE_NAME, TELEMETRY_SDK_NAME, DEPLOYMENT_ENVIRONMENT
 from openlit.__helpers import handle_exception
 from openlit.semcov import SemanticConvetion
 
@@ -13,7 +13,7 @@ from openlit.semcov import SemanticConvetion
 logger = logging.getLogger(__name__)
 
 def load_data(gen_ai_endpoint, version, environment, application_name,
-              tracer, pricing_info, trace_content):
+              tracer, pricing_info, capture_message_content):
     """
     Creates a wrapper around a function call to trace and log its execution metrics.
 
@@ -27,7 +27,7 @@ def load_data(gen_ai_endpoint, version, environment, application_name,
     - application_name (str): Name of the LlamaIndex application.
     - tracer (opentelemetry.trace.Tracer): The tracer object used for OpenTelemetry tracing.
     - pricing_info (dict): Information about the pricing for internal metrics (currently not used).
-    - trace_content (bool): Flag indicating whether to trace the content of the response.
+    - capture_message_content (bool): Flag indicating whether to trace the content of the response.
 
     Returns:
     - function: A higher-order function that takes a function 'wrapped' and returns
@@ -63,11 +63,11 @@ def load_data(gen_ai_endpoint, version, environment, application_name,
                     SemanticConvetion.GEN_AI_SYSTEM_LLAMAINDEX)
                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
                     gen_ai_endpoint)
-                span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                span.set_attribute(DEPLOYMENT_ENVIRONMENT,
                     environment)
                 span.set_attribute(SemanticConvetion.GEN_AI_OPERATION,
                     SemanticConvetion.GEN_AI_OPERATION_TYPE_FRAMEWORK)
-                span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                span.set_attribute(SERVICE_NAME,
                     application_name)
                 span.set_attribute(SemanticConvetion.GEN_AI_RETRIEVAL_SOURCE,
                     response[0].metadata["file_path"])
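
The recurring change in these wrapper modules is that the custom SemanticConvetion.GEN_AI_ENVIRONMENT and GEN_AI_APPLICATION_NAME span attributes give way to the standard OpenTelemetry resource attribute keys. A minimal standalone sketch of the new pattern (the tracer setup, span name, and values here are illustrative, not taken from openlit):

# Illustrative only: shows the attribute keys used after this change.
from opentelemetry import trace
from opentelemetry.sdk.resources import (
    SERVICE_NAME,            # "service.name"
    TELEMETRY_SDK_NAME,      # "telemetry.sdk.name"
    DEPLOYMENT_ENVIRONMENT,  # "deployment.environment"
)

tracer = trace.get_tracer(__name__)

with tracer.start_as_current_span("llamaindex.load_data") as span:
    span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
    # Previously set via SemanticConvetion.GEN_AI_ENVIRONMENT / GEN_AI_APPLICATION_NAME:
    span.set_attribute(DEPLOYMENT_ENVIRONMENT, "production")
    span.set_attribute(SERVICE_NAME, "my-rag-app")

The same substitution repeats in the mem0 and Milvus instrumentations below.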
openlit/instrumentation/mem0/__init__.py
@@ -59,7 +59,7 @@ class Mem0Instrumentor(BaseInstrumentor):
         environment = kwargs.get("environment")
         tracer = kwargs.get("tracer")
         pricing_info = kwargs.get("pricing_info")
-        trace_content = kwargs.get("trace_content")
+        capture_message_content = kwargs.get("capture_message_content")
         version = importlib.metadata.version("mem0ai")
 
         for wrapped_method in WRAPPED_METHODS:
@@ -71,7 +71,7 @@ class Mem0Instrumentor(BaseInstrumentor):
                 wrap_package,
                 wrap_object,
                 wrapper(gen_ai_endpoint, version, environment, application_name,
-                        tracer, pricing_info, trace_content),
+                        tracer, pricing_info, capture_message_content),
             )
 
     @staticmethod
openlit/instrumentation/mem0/mem0.py
@@ -5,7 +5,7 @@ Module for monitoring mem0 applications.
 
 import logging
 from opentelemetry.trace import SpanKind, Status, StatusCode
-from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+from opentelemetry.sdk.resources import SERVICE_NAME, TELEMETRY_SDK_NAME, DEPLOYMENT_ENVIRONMENT
 from openlit.__helpers import handle_exception
 from openlit.semcov import SemanticConvetion
 
@@ -13,7 +13,7 @@ from openlit.semcov import SemanticConvetion
 logger = logging.getLogger(__name__)
 
 def mem0_wrap(gen_ai_endpoint, version, environment, application_name,
-              tracer, pricing_info, trace_content):
+              tracer, pricing_info, capture_message_content):
     """
     Creates a wrapper around a function call to trace and log its execution metrics.
 
@@ -27,7 +27,7 @@ def mem0_wrap(gen_ai_endpoint, version, environment, application_name,
     - application_name (str): Name of the mem0 application.
     - tracer (opentelemetry.trace.Tracer): The tracer object used for OpenTelemetry tracing.
     - pricing_info (dict): Information about the pricing for internal metrics (currently not used).
-    - trace_content (bool): Flag indicating whether to trace the content of the response.
+    - capture_message_content (bool): Flag indicating whether to trace the content of the response.
 
     Returns:
     - function: A higher-order function that takes a function 'wrapped' and returns
@@ -63,11 +63,11 @@ def mem0_wrap(gen_ai_endpoint, version, environment, application_name,
                     SemanticConvetion.GEN_AI_SYSTEM_MEM0)
                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
                     gen_ai_endpoint)
-                span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                span.set_attribute(DEPLOYMENT_ENVIRONMENT,
                     environment)
                 span.set_attribute(SemanticConvetion.GEN_AI_OPERATION,
                     SemanticConvetion.GEN_AI_OPERATION_TYPE_FRAMEWORK)
-                span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                span.set_attribute(SERVICE_NAME,
                     application_name)
 
                 if gen_ai_endpoint == "mem0.memory_add":
openlit/instrumentation/milvus/__init__.py
@@ -72,7 +72,7 @@ class MilvusInstrumentor(BaseInstrumentor):
         tracer = kwargs.get("tracer")
         metrics = kwargs.get("metrics_dict")
         pricing_info = kwargs.get("pricing_info")
-        trace_content = kwargs.get("trace_content")
+        capture_message_content = kwargs.get("capture_message_content")
         disable_metrics = kwargs.get("disable_metrics")
         version = importlib.metadata.version("pymilvus")
 
@@ -85,7 +85,7 @@ class MilvusInstrumentor(BaseInstrumentor):
                 wrap_package,
                 wrap_object,
                 wrapper(gen_ai_endpoint, version, environment, application_name,
-                        tracer, pricing_info, trace_content, metrics, disable_metrics),
+                        tracer, pricing_info, capture_message_content, metrics, disable_metrics),
             )
 
openlit/instrumentation/milvus/milvus.py
@@ -5,7 +5,7 @@ Module for monitoring Milvus.
 
 import logging
 from opentelemetry.trace import SpanKind, Status, StatusCode
-from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+from opentelemetry.sdk.resources import SERVICE_NAME, TELEMETRY_SDK_NAME, DEPLOYMENT_ENVIRONMENT
 from openlit.__helpers import handle_exception
 from openlit.semcov import SemanticConvetion
 
@@ -25,7 +25,7 @@ def object_count(obj):
     return cnt
 
 def general_wrap(gen_ai_endpoint, version, environment, application_name,
-                 tracer, pricing_info, trace_content, metrics, disable_metrics):
+                 tracer, pricing_info, capture_message_content, metrics, disable_metrics):
     """
     Creates a wrapper around a function call to trace and log its execution metrics.
 
@@ -39,7 +39,7 @@ def general_wrap(gen_ai_endpoint, version, environment, application_name,
     - application_name (str): Name of the Langchain application.
    - tracer (opentelemetry.trace.Tracer): The tracer object used for OpenTelemetry tracing.
     - pricing_info (dict): Information about the pricing for internal metrics (currently not used).
-    - trace_content (bool): Flag indicating whether to trace the content of the response.
+    - capture_message_content (bool): Flag indicating whether to trace the content of the response.
 
     Returns:
     - function: A higher-order function that takes a function 'wrapped' and returns
@@ -73,9 +73,9 @@ def general_wrap(gen_ai_endpoint, version, environment, application_name,
                 span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
                 span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
                     gen_ai_endpoint)
-                span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
+                span.set_attribute(DEPLOYMENT_ENVIRONMENT,
                     environment)
-                span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+                span.set_attribute(SERVICE_NAME,
                     application_name)
                 span.set_attribute(SemanticConvetion.GEN_AI_OPERATION,
                     SemanticConvetion.GEN_AI_OPERATION_TYPE_VECTORDB)
@@ -153,11 +153,11 @@ def general_wrap(gen_ai_endpoint, version, environment, application_name,
                 attributes = {
                     TELEMETRY_SDK_NAME:
                         "openlit",
-                    SemanticConvetion.GEN_AI_APPLICATION_NAME:
+                    SERVICE_NAME:
                        application_name,
                     SemanticConvetion.DB_SYSTEM:
                         SemanticConvetion.DB_SYSTEM_MILVUS,
-                    SemanticConvetion.GEN_AI_ENVIRONMENT:
+                    DEPLOYMENT_ENVIRONMENT:
                        environment,
                     SemanticConvetion.GEN_AI_OPERATION:
                         SemanticConvetion.GEN_AI_OPERATION_TYPE_VECTORDB,
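
The `attributes` dict in the last hunk carries the same renamed keys into the label set used for metric recordings. A hedged sketch of how such a dict is typically passed to an OpenTelemetry counter (the meter setup, instrument name, and values are illustrative, not openlit's):

# Illustrative only: attaching the renamed attribute keys to a metric.
from opentelemetry import metrics
from opentelemetry.sdk.resources import SERVICE_NAME, TELEMETRY_SDK_NAME, DEPLOYMENT_ENVIRONMENT

meter = metrics.get_meter(__name__)
db_requests = meter.create_counter("db.total.requests")  # hypothetical instrument name

attributes = {
    TELEMETRY_SDK_NAME: "openlit",
    SERVICE_NAME: "my-vector-app",         # was SemanticConvetion.GEN_AI_APPLICATION_NAME
    DEPLOYMENT_ENVIRONMENT: "production",  # was SemanticConvetion.GEN_AI_ENVIRONMENT
}
db_requests.add(1, attributes)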
openlit/instrumentation/mistral/__init__.py
@@ -23,7 +23,7 @@ class MistralInstrumentor(BaseInstrumentor):
         tracer = kwargs.get("tracer")
         metrics = kwargs.get("metrics_dict")
         pricing_info = kwargs.get("pricing_info")
-        trace_content = kwargs.get("trace_content")
+        capture_message_content = kwargs.get("capture_message_content")
         disable_metrics = kwargs.get("disable_metrics")
         version = importlib.metadata.version("mistralai")
 
@@ -31,48 +31,48 @@ class MistralInstrumentor(BaseInstrumentor):
         wrap_function_wrapper(
             "mistralai.chat",
             "Chat.complete",
-            chat("mistral.chat", version, environment, application_name,
-                 tracer, pricing_info, trace_content, metrics, disable_metrics),
+            chat(version, environment, application_name,
+                 tracer, pricing_info, capture_message_content, metrics, disable_metrics),
         )
 
         # sync
         wrap_function_wrapper(
             "mistralai.chat",
             "Chat.stream",
-            chat_stream("mistral.chat", version, environment, application_name,
-                        tracer, pricing_info, trace_content, metrics, disable_metrics),
+            chat_stream(version, environment, application_name,
+                        tracer, pricing_info, capture_message_content, metrics, disable_metrics),
         )
 
         # sync
         wrap_function_wrapper(
             "mistralai.embeddings",
             "Embeddings.create",
-            embeddings("mistral.embeddings", version, environment, application_name,
-                       tracer, pricing_info, trace_content, metrics, disable_metrics),
+            embeddings(version, environment, application_name,
+                       tracer, pricing_info, capture_message_content, metrics, disable_metrics),
         )
 
         # Async
         wrap_function_wrapper(
             "mistralai.chat",
             "Chat.complete_async",
-            async_chat("mistral.chat", version, environment, application_name,
-                       tracer, pricing_info, trace_content, metrics, disable_metrics),
+            async_chat(version, environment, application_name,
+                       tracer, pricing_info, capture_message_content, metrics, disable_metrics),
         )
 
         # Async
         wrap_function_wrapper(
             "mistralai.chat",
             "Chat.stream_async",
-            async_chat_stream("mistral.chat", version, environment, application_name,
-                              tracer, pricing_info, trace_content, metrics, disable_metrics),
+            async_chat_stream(version, environment, application_name,
                              tracer, pricing_info, capture_message_content, metrics, disable_metrics),
         )
 
         #sync
         wrap_function_wrapper(
             "mistralai.embeddings",
             "Embeddings.create_async",
-            async_embeddings("mistral.embeddings", version, environment, application_name,
-                             tracer, pricing_info, trace_content, metrics, disable_metrics),
+            async_embeddings(version, environment, application_name,
                             tracer, pricing_info, capture_message_content, metrics, disable_metrics),
         )
 
     @staticmethod
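
Alongside the kwarg rename, the Mistral wrapper factories lose their leading `gen_ai_endpoint` positional argument. A hedged sketch of a factory shaped like the new call sites above (the body is illustrative, not openlit's implementation; the span name and attribute key are placeholders):

# Illustrative only: a factory matching the new chat(...) call signature.
def chat(version, environment, application_name,
         tracer, pricing_info, capture_message_content, metrics, disable_metrics):
    """Return a wrapt-style wrapper for mistralai Chat.complete."""

    def wrapper(wrapped, instance, args, kwargs):
        # Placeholder span name; openlit derives its own operation names.
        with tracer.start_as_current_span("chat mistral") as span:
            response = wrapped(*args, **kwargs)
            if capture_message_content:
                # Record request content only when explicitly enabled.
                span.set_attribute("gen_ai.content.prompt", str(kwargs.get("messages", "")))
            return response

    return wrapper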