openlit 1.32.11__tar.gz → 1.33.1__tar.gz

This diff reflects the content of publicly available package versions as published to their respective public registries. It is provided for informational purposes only.
Files changed (122)
  1. {openlit-1.32.11 → openlit-1.33.1}/PKG-INFO +5 -4
  2. {openlit-1.32.11 → openlit-1.33.1}/README.md +3 -2
  3. {openlit-1.32.11 → openlit-1.33.1}/pyproject.toml +2 -2
  4. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/__init__.py +6 -1
  5. openlit-1.33.1/src/openlit/instrumentation/letta/__init__.py +77 -0
  6. openlit-1.33.1/src/openlit/instrumentation/letta/letta.py +186 -0
  7. openlit-1.33.1/src/openlit/instrumentation/together/__init__.py +70 -0
  8. openlit-1.33.1/src/openlit/instrumentation/together/async_together.py +558 -0
  9. openlit-1.33.1/src/openlit/instrumentation/together/together.py +558 -0
  10. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/semcov/__init__.py +2 -0
  11. {openlit-1.32.11 → openlit-1.33.1}/LICENSE +0 -0
  12. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/__helpers.py +0 -0
  13. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/evals/__init__.py +0 -0
  14. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/evals/all.py +0 -0
  15. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/evals/bias_detection.py +0 -0
  16. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/evals/hallucination.py +0 -0
  17. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/evals/toxicity.py +0 -0
  18. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/evals/utils.py +0 -0
  19. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/guard/__init__.py +0 -0
  20. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/guard/all.py +0 -0
  21. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/guard/prompt_injection.py +0 -0
  22. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/guard/restrict_topic.py +0 -0
  23. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/guard/sensitive_topic.py +0 -0
  24. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/guard/utils.py +0 -0
  25. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ag2/__init__.py +0 -0
  26. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ag2/ag2.py +0 -0
  27. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ai21/__init__.py +0 -0
  28. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ai21/ai21.py +0 -0
  29. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ai21/async_ai21.py +0 -0
  30. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
  31. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
  32. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
  33. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/assemblyai/__init__.py +0 -0
  34. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/assemblyai/assemblyai.py +0 -0
  35. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/astra/__init__.py +0 -0
  36. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/astra/astra.py +0 -0
  37. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/astra/async_astra.py +0 -0
  38. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/azure_ai_inference/__init__.py +0 -0
  39. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +0 -0
  40. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +0 -0
  41. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/bedrock/__init__.py +0 -0
  42. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/bedrock/bedrock.py +0 -0
  43. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/chroma/__init__.py +0 -0
  44. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/chroma/chroma.py +0 -0
  45. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/cohere/__init__.py +0 -0
  46. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/cohere/cohere.py +0 -0
  47. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/controlflow/__init__.py +0 -0
  48. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/controlflow/controlflow.py +0 -0
  49. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/crawl4ai/__init__.py +0 -0
  50. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/crawl4ai/async_crawl4ai.py +0 -0
  51. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/crawl4ai/crawl4ai.py +0 -0
  52. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/crewai/__init__.py +0 -0
  53. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/crewai/crewai.py +0 -0
  54. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/dynamiq/__init__.py +0 -0
  55. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/dynamiq/dynamiq.py +0 -0
  56. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/elevenlabs/__init__.py +0 -0
  57. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/elevenlabs/async_elevenlabs.py +0 -0
  58. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/elevenlabs/elevenlabs.py +0 -0
  59. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/embedchain/__init__.py +0 -0
  60. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/embedchain/embedchain.py +0 -0
  61. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/firecrawl/__init__.py +0 -0
  62. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/firecrawl/firecrawl.py +0 -0
  63. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/google_ai_studio/__init__.py +0 -0
  64. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +0 -0
  65. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/google_ai_studio/google_ai_studio.py +0 -0
  66. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/gpt4all/__init__.py +0 -0
  67. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/gpt4all/gpt4all.py +0 -0
  68. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/gpu/__init__.py +0 -0
  69. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/groq/__init__.py +0 -0
  70. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/groq/async_groq.py +0 -0
  71. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/groq/groq.py +0 -0
  72. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/haystack/__init__.py +0 -0
  73. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/haystack/haystack.py +0 -0
  74. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/julep/__init__.py +0 -0
  75. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/julep/async_julep.py +0 -0
  76. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/julep/julep.py +0 -0
  77. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/langchain/__init__.py +0 -0
  78. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/langchain/langchain.py +0 -0
  79. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/litellm/__init__.py +0 -0
  80. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/litellm/async_litellm.py +0 -0
  81. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/litellm/litellm.py +0 -0
  82. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/llamaindex/__init__.py +0 -0
  83. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/llamaindex/llamaindex.py +0 -0
  84. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/mem0/__init__.py +0 -0
  85. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/mem0/mem0.py +0 -0
  86. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/milvus/__init__.py +0 -0
  87. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/milvus/milvus.py +0 -0
  88. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/mistral/__init__.py +0 -0
  89. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
  90. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/mistral/mistral.py +0 -0
  91. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/multion/__init__.py +0 -0
  92. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/multion/async_multion.py +0 -0
  93. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/multion/multion.py +0 -0
  94. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ollama/__init__.py +0 -0
  95. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ollama/async_ollama.py +0 -0
  96. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/ollama/ollama.py +0 -0
  97. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/openai/__init__.py +0 -0
  98. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
  99. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/openai/async_openai.py +0 -0
  100. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
  101. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/openai/openai.py +0 -0
  102. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/phidata/__init__.py +0 -0
  103. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/phidata/phidata.py +0 -0
  104. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
  105. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
  106. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/premai/__init__.py +0 -0
  107. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/premai/premai.py +0 -0
  108. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/qdrant/__init__.py +0 -0
  109. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/qdrant/async_qdrant.py +0 -0
  110. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/qdrant/qdrant.py +0 -0
  111. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/reka/__init__.py +0 -0
  112. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/reka/async_reka.py +0 -0
  113. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/reka/reka.py +0 -0
  114. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/transformers/__init__.py +0 -0
  115. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/transformers/transformers.py +0 -0
  116. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/vertexai/__init__.py +0 -0
  117. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/vertexai/async_vertexai.py +0 -0
  118. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/vertexai/vertexai.py +0 -0
  119. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/vllm/__init__.py +0 -0
  120. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/instrumentation/vllm/vllm.py +0 -0
  121. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/otel/metrics.py +0 -0
  122. {openlit-1.32.11 → openlit-1.33.1}/src/openlit/otel/tracing.py +0 -0
{openlit-1.32.11 → openlit-1.33.1}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: openlit
- Version: 1.32.11
+ Version: 1.33.1
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -13,7 +13,7 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
- Requires-Dist: anthropic (>=0.21.0,<0.22.0)
+ Requires-Dist: anthropic (>=0.42.0,<0.43.0)
  Requires-Dist: boto3 (>=1.34.0,<2.0.0)
  Requires-Dist: botocore (>=1.34.0,<2.0.0)
  Requires-Dist: openai (>=1.1.1,<2.0.0)
@@ -56,7 +56,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi

  ## ⚡ Features

- - 🔎 **Auto Instrumentation**: Works with 40+ LLM providers, Agents, Vector databases, and GPUs with just one line of code.
+ - 🔎 **Auto Instrumentation**: Works with 50+ LLM providers, Agents, Vector databases, and GPUs with just one line of code.
  - 🔭 **OpenTelemetry-Native Observability SDKs**: Vendor-neutral SDKs that can send traces and metrics to your existing observability tool like Prometheus and Jaeger.
  - 💲 **Cost Tracking for Custom and Fine-Tuned Models**: Pass custom pricing files for accurate budgeting of custom and fine-tuned models.
  - 🚀 **Suppport for OpenLIT Features**: Includes suppprt for prompt management and secrets management features available in OpenLIT.
@@ -82,13 +82,14 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
  | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
  | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | [✅ Crawl4AI](https://docs.openlit.io/latest/integrations/crawl4ai) | |
  | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | [✅ FireCrawl](https://docs.openlit.io/latest/integrations/firecrawl) | |
- | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
+ | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | [✅ Letta](https://docs.openlit.io/latest/integrations/letta) | |
  | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
  | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
  | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
  | [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
  | [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |
  | [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
+ | [✅ Together](https://docs.openlit.io/latest/integrations/together) | | | |

  ## Supported Destinations
  - [✅ OpenTelemetry Collector](https://docs.openlit.io/latest/connections/otelcol)
{openlit-1.32.11 → openlit-1.33.1}/README.md

@@ -25,7 +25,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi

  ## ⚡ Features

- - 🔎 **Auto Instrumentation**: Works with 40+ LLM providers, Agents, Vector databases, and GPUs with just one line of code.
+ - 🔎 **Auto Instrumentation**: Works with 50+ LLM providers, Agents, Vector databases, and GPUs with just one line of code.
  - 🔭 **OpenTelemetry-Native Observability SDKs**: Vendor-neutral SDKs that can send traces and metrics to your existing observability tool like Prometheus and Jaeger.
  - 💲 **Cost Tracking for Custom and Fine-Tuned Models**: Pass custom pricing files for accurate budgeting of custom and fine-tuned models.
  - 🚀 **Suppport for OpenLIT Features**: Includes suppprt for prompt management and secrets management features available in OpenLIT.
@@ -51,13 +51,14 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
  | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
  | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | [✅ Crawl4AI](https://docs.openlit.io/latest/integrations/crawl4ai) | |
  | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | [✅ FireCrawl](https://docs.openlit.io/latest/integrations/firecrawl) | |
- | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
+ | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | [✅ Letta](https://docs.openlit.io/latest/integrations/letta) | |
  | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
  | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
  | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
  | [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
  | [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |
  | [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
+ | [✅ Together](https://docs.openlit.io/latest/integrations/together) | | | |

  ## Supported Destinations
  - [✅ OpenTelemetry Collector](https://docs.openlit.io/latest/connections/otelcol)
{openlit-1.32.11 → openlit-1.33.1}/pyproject.toml

@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "openlit"
- version = "1.32.11"
+ version = "1.33.1"
  description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects"
  authors = ["OpenLIT"]
  repository = "https://github.com/openlit/openlit/tree/main/openlit/python"
@@ -22,7 +22,7 @@ opentelemetry-sdk = "^1.27.0"
  opentelemetry-exporter-otlp = "^1.27.0"
  opentelemetry-instrumentation = "^0.48b0"
  openai = "^1.1.1"
- anthropic = "^0.21.0"
+ anthropic = "^0.42.0"

  [build-system]
  requires = ["poetry-core>=1.1.0"]
{openlit-1.32.11 → openlit-1.33.1}/src/openlit/__init__.py

@@ -52,6 +52,7 @@ from openlit.instrumentation.milvus import MilvusInstrumentor
  from openlit.instrumentation.astra import AstraInstrumentor
  from openlit.instrumentation.transformers import TransformersInstrumentor
  from openlit.instrumentation.litellm import LiteLLMInstrumentor
+ from openlit.instrumentation.together import TogetherInstrumentor
  from openlit.instrumentation.crewai import CrewAIInstrumentor
  from openlit.instrumentation.ag2 import AG2Instrumentor
  from openlit.instrumentation.multion import MultiOnInstrumentor
@@ -62,6 +63,7 @@ from openlit.instrumentation.ai21 import AI21Instrumentor
  from openlit.instrumentation.controlflow import ControlFlowInstrumentor
  from openlit.instrumentation.crawl4ai import Crawl4AIInstrumentor
  from openlit.instrumentation.firecrawl import FireCrawlInstrumentor
+ from openlit.instrumentation.letta import LettaInstrumentor
  from openlit.instrumentation.gpu import GPUInstrumentor
  import openlit.guard
  import openlit.evals
@@ -187,7 +189,6 @@ def instrument_if_available(
  metrics_dict=config.metrics_dict,
  disable_metrics=config.disable_metrics,
  )
- logger.info("Instrumented %s", instrumentor_name)
  else:
  # pylint: disable=line-too-long
  logger.info("Library for %s (%s) not found. Skipping instrumentation", instrumentor_name, module_name)
@@ -262,6 +263,8 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
  "assemblyai": "assemblyai",
  "crawl4ai": "crawl4ai",
  "firecrawl": "firecrawl",
+ "letta": "letta",
+ "together": "together",
  }

  invalid_instrumentors = [
@@ -357,6 +360,8 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
  "assemblyai": AssemblyAIInstrumentor(),
  "crawl4ai": Crawl4AIInstrumentor(),
  "firecrawl": FireCrawlInstrumentor(),
+ "letta": LettaInstrumentor(),
+ "together": TogetherInstrumentor(),
  }

  # Initialize and instrument only the enabled instrumentors
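Taken together, the src/openlit/__init__.py changes register the two new integrations the same way as the existing ones: init() maps each enabled key to its module name, skips it if the library is not importable, and otherwise runs the matching instrumentor. A minimal sketch of how an application would pick this up; only environment and application_name are visible in the (truncated) init() signature above, so everything else here is illustrative:

# Minimal sketch: assumes openlit 1.33.1 plus the letta and/or together packages are installed.
import openlit

# One call wires up every instrumentor whose target library is importable,
# which after this release includes Letta and Together AI.
openlit.init(
    environment="production",
    application_name="genai-backend",
)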
openlit-1.33.1/src/openlit/instrumentation/letta/__init__.py

@@ -0,0 +1,77 @@
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
+ """Initializer of Auto Instrumentation of Letta Functions"""
+
+ from typing import Collection
+ import importlib.metadata
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from wrapt import wrap_function_wrapper
+
+ from openlit.instrumentation.letta.letta import (
+ create_agent, send_message
+ )
+
+ _instruments = ("letta >= 0.6.2",)
+
+ class LettaInstrumentor(BaseInstrumentor):
+ """
+ An instrumentor for Letta's client library.
+ """
+
+ def instrumentation_dependencies(self) -> Collection[str]:
+ return _instruments
+
+ def _instrument(self, **kwargs):
+ application_name = kwargs.get("application_name", "default_application")
+ environment = kwargs.get("environment", "default_environment")
+ tracer = kwargs.get("tracer")
+ metrics = kwargs.get("metrics_dict")
+ pricing_info = kwargs.get("pricing_info", {})
+ trace_content = kwargs.get("trace_content", False)
+ disable_metrics = kwargs.get("disable_metrics")
+ version = importlib.metadata.version("letta")
+
+ wrap_function_wrapper(
+ "letta.client.client",
+ "LocalClient.create_agent",
+ create_agent("letta.create_agent", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ wrap_function_wrapper(
+ "letta.client.client",
+ "LocalClient.get_agent",
+ create_agent("letta.get_agent", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ wrap_function_wrapper(
+ "letta.client.client",
+ "LocalClient.send_message",
+ send_message("letta.send_message", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ wrap_function_wrapper(
+ "letta.client.client",
+ "RESTClient.create_agent",
+ create_agent("letta.create_agent", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ wrap_function_wrapper(
+ "letta.client.client",
+ "RESTClient.get_agent",
+ create_agent("letta.get_agent", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ wrap_function_wrapper(
+ "letta.client.client",
+ "RESTClient.send_message",
+ send_message("letta.send_message", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ def _uninstrument(self, **kwargs):
+ # Proper uninstrumentation logic to revert patched methods
+ pass
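For orientation, the wrapped methods above sit on the ordinary Letta client surface (LocalClient and RESTClient in letta.client.client). A rough usage sketch of the calls that would now emit spans; the create_client() helper and the exact create_agent/send_message arguments are assumptions about the letta >= 0.6.2 API and are not shown in this diff:

# Hypothetical sketch: only the wrapped method names (create_agent, get_agent,
# send_message) come from this diff; the client API below is an assumption.
import openlit
from letta import create_client

openlit.init(application_name="letta-demo")

client = create_client()                    # LocalClient or RESTClient (assumption)
agent = client.create_agent(name="helper")  # traced as "letta.create_agent"
reply = client.send_message(                # traced as "letta.send_message"
    agent_id=agent.id,
    role="user",
    message="Hello!",
)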
openlit-1.33.1/src/openlit/instrumentation/letta/letta.py

@@ -0,0 +1,186 @@
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument, too-many-branches
+ """
+ Module for monitoring Letta calls.
+ """
+
+ import logging
+ from opentelemetry.trace import SpanKind, Status, StatusCode
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
+ from openlit.__helpers import (
+ handle_exception, get_chat_model_cost
+ )
+ from openlit.semcov import SemanticConvetion
+
+ # Initialize logger for logging potential issues and operations
+ logger = logging.getLogger(__name__)
+
+ def create_agent(gen_ai_endpoint, version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics):
+ """
+ Generates a telemetry wrapper for chat completions to collect metrics.
+
+ Args:
+ gen_ai_endpoint: Endpoint identifier for logging and tracing.
+ version: Version of the monitoring package.
+ environment: Deployment environment (e.g., production, staging).
+ application_name: Name of the application using the Letta Agent.
+ tracer: OpenTelemetry tracer for creating spans.
+ pricing_info: Information used for calculating the cost of Letta usage.
+ trace_content: Flag indicating whether to trace the actual content.
+
+ Returns:
+ A function that wraps the chat completions method to add telemetry.
+ """
+
+ def wrapper(wrapped, instance, args, kwargs):
+ """
+ Wraps the API call to add telemetry.
+
+ This collects metrics such as execution time, cost, and token usage, and handles errors
+ gracefully, adding details to the trace for observability.
+
+ Args:
+ wrapped: The original method to be wrapped.
+ instance: The instance of the class where the original method is defined.
+ args: Positional arguments for the method.
+ kwargs: Keyword arguments for the method.
+
+ Returns:
+ The response from the original method.
+ """
+
+ # pylint: disable=line-too-long
+ with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+ response = wrapped(*args, **kwargs)
+
+ try:
+ # Set base span attribues
+ span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+ span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+ SemanticConvetion.GEN_AI_SYSTEM_LETTA)
+ span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+ SemanticConvetion.GEN_AI_TYPE_AGENT)
+ span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+ gen_ai_endpoint)
+ span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+ application_name)
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ID,
+ response.id)
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ROLE,
+ response.name)
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_INSTRUCTIONS,
+ response.system)
+ span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL,
+ response.llm_config.model)
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
+ response.agent_type)
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TOOLS,
+ response.tool_names)
+
+ span.set_status(Status(StatusCode.OK))
+
+ # Return original response
+ return response
+
+ except Exception as e:
+ handle_exception(span, e)
+ logger.error("Error in trace creation: %s", e)
+
+ # Return original response
+ return response
+
+ return wrapper
+
+ def send_message(gen_ai_endpoint, version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics):
+ """
+ Generates a telemetry wrapper for chat completions to collect metrics.
+
+ Args:
+ gen_ai_endpoint: Endpoint identifier for logging and tracing.
+ version: Version of the monitoring package.
+ environment: Deployment environment (e.g., production, staging).
+ application_name: Name of the application using the Letta Agent.
+ tracer: OpenTelemetry tracer for creating spans.
+ pricing_info: Information used for calculating the cost of Letta usage.
+ trace_content: Flag indicating whether to trace the actual content.
+
+ Returns:
+ A function that wraps the chat completions method to add telemetry.
+ """
+
+ def wrapper(wrapped, instance, args, kwargs):
+ """
+ Wraps the API call to add telemetry.
+
+ This collects metrics such as execution time, cost, and token usage, and handles errors
+ gracefully, adding details to the trace for observability.
+
+ Args:
+ wrapped: The original method to be wrapped.
+ instance: The instance of the class where the original method is defined.
+ args: Positional arguments for the method.
+ kwargs: Keyword arguments for the method.
+
+ Returns:
+ The response from the original method.
+ """
+
+ # pylint: disable=line-too-long
+ with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
+ response = wrapped(*args, **kwargs)
+
+ try:
+ # Calculate cost of the operation
+ cost = get_chat_model_cost(kwargs.get("model", "gpt-4o"),
+ pricing_info, response.usage.prompt_tokens,
+ response.usage.completion_tokens)
+ # Set base span attribues
+ span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
+ span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
+ SemanticConvetion.GEN_AI_SYSTEM_LETTA)
+ span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
+ SemanticConvetion.GEN_AI_TYPE_AGENT)
+ span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
+ gen_ai_endpoint)
+ span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
+ application_name)
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_STEP_COUNT,
+ response.usage.step_count)
+ span.set_attribute(SemanticConvetion.GEN_AI_USAGE_PROMPT_TOKENS,
+ response.usage.prompt_tokens)
+ span.set_attribute(SemanticConvetion.GEN_AI_USAGE_COMPLETION_TOKENS,
+ response.usage.completion_tokens)
+ span.set_attribute(SemanticConvetion.GEN_AI_USAGE_TOTAL_TOKENS,
+ response.usage.total_tokens)
+ span.set_attribute(SemanticConvetion.GEN_AI_USAGE_COST,
+ cost)
+
+ if trace_content:
+ span.add_event(
+ name=SemanticConvetion.GEN_AI_CONTENT_PROMPT_EVENT,
+ attributes={
+ SemanticConvetion.GEN_AI_CONTENT_PROMPT: kwargs.get("message", ""),
+ },
+ )
+ span.add_event(
+ name=SemanticConvetion.GEN_AI_CONTENT_COMPLETION_EVENT,
+ # pylint: disable=line-too-long
+ attributes={
+ SemanticConvetion.GEN_AI_CONTENT_COMPLETION: str(response.messages),
+ },
+ )
+
+ span.set_status(Status(StatusCode.OK))
+
+ # Return original response
+ return response
+
+ except Exception as e:
+ handle_exception(span, e)
+ logger.error("Error in trace creation: %s", e)
+
+ # Return original response
+ return response
+
+ return wrapper
openlit-1.33.1/src/openlit/instrumentation/together/__init__.py

@@ -0,0 +1,70 @@
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
+ """Initializer of Auto Instrumentation of Together AI Functions"""
+
+ from typing import Collection
+ import importlib.metadata
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from wrapt import wrap_function_wrapper
+
+ from openlit.instrumentation.together.together import (
+ completion, image_generate
+ )
+ from openlit.instrumentation.together.async_together import (
+ async_completion, async_image_generate
+ )
+
+ _instruments = ("together >= 1.3.5",)
+
+ class TogetherInstrumentor(BaseInstrumentor):
+ """
+ An instrumentor for Together's client library.
+ """
+
+ def instrumentation_dependencies(self) -> Collection[str]:
+ return _instruments
+
+ def _instrument(self, **kwargs):
+ application_name = kwargs.get("application_name", "default_application")
+ environment = kwargs.get("environment", "default_environment")
+ tracer = kwargs.get("tracer")
+ metrics = kwargs.get("metrics_dict")
+ pricing_info = kwargs.get("pricing_info", {})
+ trace_content = kwargs.get("trace_content", False)
+ disable_metrics = kwargs.get("disable_metrics")
+ version = importlib.metadata.version("together")
+
+ # Chat completions
+ wrap_function_wrapper(
+ "together.resources.chat.completions",
+ "ChatCompletions.create",
+ completion("together.chat.completions", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ # Image generate
+ wrap_function_wrapper(
+ "together.resources.images",
+ "Images.generate",
+ image_generate("together.image.generate", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ # Chat completions
+ wrap_function_wrapper(
+ "together.resources.chat.completions",
+ "AsyncChatCompletions.create",
+ async_completion("together.chat.completions", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ # Image generate
+ wrap_function_wrapper(
+ "together.resources.images",
+ "AsyncImages.generate",
+ async_image_generate("together.image.generate", version, environment, application_name,
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
+ )
+
+ def _uninstrument(self, **kwargs):
+ # Proper uninstrumentation logic to revert patched methods
+ pass
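As with Letta, the patched paths map onto the public Together client. A rough sketch of calls that would be traced once openlit.init() has run; the Together client class, keyword arguments, and model names are assumptions about the together >= 1.3.5 SDK rather than part of this diff (AsyncTogether would exercise the Async* wrappers in the same way):

# Hypothetical sketch: only the wrapped targets (ChatCompletions.create,
# Images.generate and their Async* counterparts) come from this diff.
import openlit
from together import Together

openlit.init(application_name="together-demo")

client = Together()
chat = client.chat.completions.create(      # traced as "together.chat.completions"
    model="meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",  # example model name, not from the diff
    messages=[{"role": "user", "content": "Say hello"}],
)
image = client.images.generate(             # traced as "together.image.generate"
    model="black-forest-labs/FLUX.1-schnell",             # example model name, not from the diff
    prompt="a lighthouse at dusk",
)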