openlit 1.32.9__tar.gz → 1.32.12__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119) hide show
  1. {openlit-1.32.9 → openlit-1.32.12}/PKG-INFO +7 -7
  2. {openlit-1.32.9 → openlit-1.32.12}/README.md +6 -6
  3. {openlit-1.32.9 → openlit-1.32.12}/pyproject.toml +1 -1
  4. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/__init__.py +9 -0
  5. openlit-1.32.12/src/openlit/instrumentation/crawl4ai/__init__.py +52 -0
  6. openlit-1.32.12/src/openlit/instrumentation/crawl4ai/async_crawl4ai.py +104 -0
  7. openlit-1.32.12/src/openlit/instrumentation/crawl4ai/crawl4ai.py +104 -0
  8. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/dynamiq/dynamiq.py +2 -0
  9. openlit-1.32.12/src/openlit/instrumentation/firecrawl/__init__.py +49 -0
  10. openlit-1.32.12/src/openlit/instrumentation/firecrawl/firecrawl.py +90 -0
  11. openlit-1.32.12/src/openlit/instrumentation/letta/__init__.py +77 -0
  12. openlit-1.32.12/src/openlit/instrumentation/letta/letta.py +186 -0
  13. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/semcov/__init__.py +8 -0
  14. {openlit-1.32.9 → openlit-1.32.12}/LICENSE +0 -0
  15. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/__helpers.py +0 -0
  16. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/evals/__init__.py +0 -0
  17. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/evals/all.py +0 -0
  18. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/evals/bias_detection.py +0 -0
  19. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/evals/hallucination.py +0 -0
  20. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/evals/toxicity.py +0 -0
  21. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/evals/utils.py +0 -0
  22. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/guard/__init__.py +0 -0
  23. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/guard/all.py +0 -0
  24. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/guard/prompt_injection.py +0 -0
  25. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/guard/restrict_topic.py +0 -0
  26. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/guard/sensitive_topic.py +0 -0
  27. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/guard/utils.py +0 -0
  28. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ag2/__init__.py +0 -0
  29. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ag2/ag2.py +0 -0
  30. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ai21/__init__.py +0 -0
  31. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ai21/ai21.py +0 -0
  32. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ai21/async_ai21.py +0 -0
  33. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
  34. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
  35. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
  36. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/assemblyai/__init__.py +0 -0
  37. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/assemblyai/assemblyai.py +0 -0
  38. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/astra/__init__.py +0 -0
  39. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/astra/astra.py +0 -0
  40. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/astra/async_astra.py +0 -0
  41. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/azure_ai_inference/__init__.py +0 -0
  42. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/azure_ai_inference/async_azure_ai_inference.py +0 -0
  43. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/azure_ai_inference/azure_ai_inference.py +0 -0
  44. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/bedrock/__init__.py +0 -0
  45. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/bedrock/bedrock.py +0 -0
  46. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/chroma/__init__.py +0 -0
  47. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/chroma/chroma.py +0 -0
  48. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/cohere/__init__.py +0 -0
  49. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/cohere/cohere.py +0 -0
  50. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/controlflow/__init__.py +0 -0
  51. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/controlflow/controlflow.py +0 -0
  52. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/crewai/__init__.py +0 -0
  53. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/crewai/crewai.py +0 -0
  54. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/dynamiq/__init__.py +0 -0
  55. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/elevenlabs/__init__.py +0 -0
  56. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/elevenlabs/async_elevenlabs.py +0 -0
  57. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/elevenlabs/elevenlabs.py +0 -0
  58. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/embedchain/__init__.py +0 -0
  59. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/embedchain/embedchain.py +0 -0
  60. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/google_ai_studio/__init__.py +0 -0
  61. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/google_ai_studio/async_google_ai_studio.py +0 -0
  62. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/google_ai_studio/google_ai_studio.py +0 -0
  63. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/gpt4all/__init__.py +0 -0
  64. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/gpt4all/gpt4all.py +0 -0
  65. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/gpu/__init__.py +0 -0
  66. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/groq/__init__.py +0 -0
  67. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/groq/async_groq.py +0 -0
  68. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/groq/groq.py +0 -0
  69. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/haystack/__init__.py +0 -0
  70. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/haystack/haystack.py +0 -0
  71. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/julep/__init__.py +0 -0
  72. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/julep/async_julep.py +0 -0
  73. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/julep/julep.py +0 -0
  74. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/langchain/__init__.py +0 -0
  75. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/langchain/langchain.py +0 -0
  76. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/litellm/__init__.py +0 -0
  77. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/litellm/async_litellm.py +0 -0
  78. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/litellm/litellm.py +0 -0
  79. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/llamaindex/__init__.py +0 -0
  80. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/llamaindex/llamaindex.py +0 -0
  81. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/mem0/__init__.py +0 -0
  82. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/mem0/mem0.py +0 -0
  83. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/milvus/__init__.py +0 -0
  84. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/milvus/milvus.py +0 -0
  85. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/mistral/__init__.py +0 -0
  86. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
  87. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/mistral/mistral.py +0 -0
  88. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/multion/__init__.py +0 -0
  89. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/multion/async_multion.py +0 -0
  90. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/multion/multion.py +0 -0
  91. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ollama/__init__.py +0 -0
  92. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ollama/async_ollama.py +0 -0
  93. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/ollama/ollama.py +0 -0
  94. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/openai/__init__.py +0 -0
  95. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
  96. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/openai/async_openai.py +0 -0
  97. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
  98. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/openai/openai.py +0 -0
  99. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/phidata/__init__.py +0 -0
  100. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/phidata/phidata.py +0 -0
  101. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
  102. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
  103. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/premai/__init__.py +0 -0
  104. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/premai/premai.py +0 -0
  105. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/qdrant/__init__.py +0 -0
  106. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/qdrant/async_qdrant.py +0 -0
  107. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/qdrant/qdrant.py +0 -0
  108. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/reka/__init__.py +0 -0
  109. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/reka/async_reka.py +0 -0
  110. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/reka/reka.py +0 -0
  111. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/transformers/__init__.py +0 -0
  112. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/transformers/transformers.py +0 -0
  113. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/vertexai/__init__.py +0 -0
  114. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/vertexai/async_vertexai.py +0 -0
  115. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/vertexai/vertexai.py +0 -0
  116. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/vllm/__init__.py +0 -0
  117. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/instrumentation/vllm/vllm.py +0 -0
  118. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/otel/metrics.py +0 -0
  119. {openlit-1.32.9 → openlit-1.32.12}/src/openlit/otel/tracing.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: openlit
3
- Version: 1.32.9
3
+ Version: 1.32.12
4
4
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects
5
5
  Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
6
6
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT,gpu
@@ -69,7 +69,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
69
69
  | [✅ Ollama](https://docs.openlit.io/latest/integrations/ollama) | [✅ Pinecone](https://docs.openlit.io/latest/integrations/pinecone) | [✅ LiteLLM](https://docs.openlit.io/latest/integrations/litellm) | [✅ AMD](https://docs.openlit.io/latest/integrations/amd-gpu) |
70
70
  | [✅ Anthropic](https://docs.openlit.io/latest/integrations/anthropic) | [✅ Qdrant](https://docs.openlit.io/latest/integrations/qdrant) | [✅ LlamaIndex](https://docs.openlit.io/latest/integrations/llama-index) | |
71
71
  | [✅ GPT4All](https://docs.openlit.io/latest/integrations/gpt4all) | [✅ Milvus](https://docs.openlit.io/latest/integrations/milvus) | [✅ Haystack](https://docs.openlit.io/latest/integrations/haystack) | |
72
- | [✅ Cohere](https://docs.openlit.io/latest/integrations/cohere) | [✅ AstraDB](https://docs.openlit.io/latest/integrations/astradb) | [✅ EmbedChain](https://docs.openlit.io/latest/integrations/embedchain) | |
72
+ | [✅ Cohere](https://docs.openlit.io/latest/integrations/cohere) | [✅ AstraDB](https://docs.openlit.io/latest/integrations/astradb) | [✅ EmbedChain](https://docs.openlit.io/latest/integrations/embedchain) | |
73
73
  | [✅ Mistral](https://docs.openlit.io/latest/integrations/mistral) | | [✅ Guardrails](https://docs.openlit.io/latest/integrations/guardrails) | |
74
74
  | [✅ Azure OpenAI](https://docs.openlit.io/latest/integrations/azure-openai) | | [✅ CrewAI](https://docs.openlit.io/latest/integrations/crewai) | |
75
75
  | [✅ Azure AI Inference](https://docs.openlit.io/latest/integrations/azure-ai-inference) | | [✅ DSPy](https://docs.openlit.io/latest/integrations/dspy) | |
@@ -80,15 +80,15 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
80
80
  | [✅ Vertex AI](https://docs.openlit.io/latest/integrations/vertexai) | | [✅ MultiOn](https://docs.openlit.io/latest/integrations/multion) | |
81
81
  | [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
82
82
  | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
83
- | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | | |
84
- | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
85
- | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
83
+ | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | [✅ Crawl4AI](https://docs.openlit.io/latest/integrations/crawl4ai) | |
84
+ | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | [✅ FireCrawl](https://docs.openlit.io/latest/integrations/firecrawl) | |
85
+ | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | [✅ Letta](https://docs.openlit.io/latest/integrations/letta) | |
86
86
  | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
87
87
  | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
88
88
  | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
89
89
  | [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
90
90
  | [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |
91
- | [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
91
+ | [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
92
92
 
93
93
  ## Supported Destinations
94
94
  - [✅ OpenTelemetry Collector](https://docs.openlit.io/latest/connections/otelcol)
@@ -260,7 +260,7 @@ We are dedicated to continuously improving OpenLIT SDKs. Here's a look at what's
260
260
  | [OpenTelemetry-native auto-instrumentation for NVIDIA GPU Monitoring](https://docs.openlit.io/latest/features/gpu) | ✅ Completed |
261
261
  | [Real-Time Guardrails Implementation](https://docs.openlit.io/latest/features/guardrails) | ✅ Completed |
262
262
  | [Programmatic Evaluation for LLM Response](https://docs.openlit.io/latest/features/evaluations) | ✅ Completed |
263
- | [OpenTelmetry auto-instrumentation for Agent Frameworks like CrewAI, DsPy]() | 🔜 Coming Soon |
263
+ | [OpenTelemetry-native AI Agent Observability]() | Completed |
264
264
 
265
265
 
266
266
  ## 🌱 Contributing
@@ -38,7 +38,7 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
38
38
  | [✅ Ollama](https://docs.openlit.io/latest/integrations/ollama) | [✅ Pinecone](https://docs.openlit.io/latest/integrations/pinecone) | [✅ LiteLLM](https://docs.openlit.io/latest/integrations/litellm) | [✅ AMD](https://docs.openlit.io/latest/integrations/amd-gpu) |
39
39
  | [✅ Anthropic](https://docs.openlit.io/latest/integrations/anthropic) | [✅ Qdrant](https://docs.openlit.io/latest/integrations/qdrant) | [✅ LlamaIndex](https://docs.openlit.io/latest/integrations/llama-index) | |
40
40
  | [✅ GPT4All](https://docs.openlit.io/latest/integrations/gpt4all) | [✅ Milvus](https://docs.openlit.io/latest/integrations/milvus) | [✅ Haystack](https://docs.openlit.io/latest/integrations/haystack) | |
41
- | [✅ Cohere](https://docs.openlit.io/latest/integrations/cohere) | [✅ AstraDB](https://docs.openlit.io/latest/integrations/astradb) | [✅ EmbedChain](https://docs.openlit.io/latest/integrations/embedchain) | |
41
+ | [✅ Cohere](https://docs.openlit.io/latest/integrations/cohere) | [✅ AstraDB](https://docs.openlit.io/latest/integrations/astradb) | [✅ EmbedChain](https://docs.openlit.io/latest/integrations/embedchain) | |
42
42
  | [✅ Mistral](https://docs.openlit.io/latest/integrations/mistral) | | [✅ Guardrails](https://docs.openlit.io/latest/integrations/guardrails) | |
43
43
  | [✅ Azure OpenAI](https://docs.openlit.io/latest/integrations/azure-openai) | | [✅ CrewAI](https://docs.openlit.io/latest/integrations/crewai) | |
44
44
  | [✅ Azure AI Inference](https://docs.openlit.io/latest/integrations/azure-ai-inference) | | [✅ DSPy](https://docs.openlit.io/latest/integrations/dspy) | |
@@ -49,15 +49,15 @@ This project proudly follows and maintains the [Semantic Conventions](https://gi
49
49
  | [✅ Vertex AI](https://docs.openlit.io/latest/integrations/vertexai) | | [✅ MultiOn](https://docs.openlit.io/latest/integrations/multion) | |
50
50
  | [✅ Groq](https://docs.openlit.io/latest/integrations/groq) | | [✅ Julep AI](https://docs.openlit.io/latest/integrations/julep-ai) | |
51
51
  | [✅ ElevenLabs](https://docs.openlit.io/latest/integrations/elevenlabs) | | [✅ ControlFlow](https://docs.openlit.io/latest/integrations/controlflow) | |
52
- | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | | |
53
- | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | | |
54
- | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | | |
52
+ | [✅ vLLM](https://docs.openlit.io/latest/integrations/vllm) | | [✅ Crawl4AI](https://docs.openlit.io/latest/integrations/crawl4ai) | |
53
+ | [✅ OLA Krutrim](https://docs.openlit.io/latest/integrations/krutrim) | | [✅ FireCrawl](https://docs.openlit.io/latest/integrations/firecrawl) | |
54
+ | [✅ Google AI Studio](https://docs.openlit.io/latest/integrations/google-ai-studio) | | [✅ Letta](https://docs.openlit.io/latest/integrations/letta) | |
55
55
  | [✅ NVIDIA NIM](https://docs.openlit.io/latest/integrations/nvidia-nim) | | | |
56
56
  | [✅ Titan ML](https://docs.openlit.io/latest/integrations/titan-ml) | | | |
57
57
  | [✅ Reka AI](https://docs.openlit.io/latest/integrations/reka) | | | |
58
58
  | [✅ xAI](https://docs.openlit.io/latest/integrations/xai) | | | |
59
59
  | [✅ Prem AI](https://docs.openlit.io/latest/integrations/premai) | | | |
60
- | [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
60
+ | [✅ Assembly AI](https://docs.openlit.io/latest/integrations/assemblyai) | | | |
61
61
 
62
62
  ## Supported Destinations
63
63
  - [✅ OpenTelemetry Collector](https://docs.openlit.io/latest/connections/otelcol)
@@ -229,7 +229,7 @@ We are dedicated to continuously improving OpenLIT SDKs. Here's a look at what's
229
229
  | [OpenTelemetry-native auto-instrumentation for NVIDIA GPU Monitoring](https://docs.openlit.io/latest/features/gpu) | ✅ Completed |
230
230
  | [Real-Time Guardrails Implementation](https://docs.openlit.io/latest/features/guardrails) | ✅ Completed |
231
231
  | [Programmatic Evaluation for LLM Response](https://docs.openlit.io/latest/features/evaluations) | ✅ Completed |
232
- | [OpenTelmetry auto-instrumentation for Agent Frameworks like CrewAI, DsPy]() | 🔜 Coming Soon |
232
+ | [OpenTelemetry-native AI Agent Observability]() | Completed |
233
233
 
234
234
 
235
235
  ## 🌱 Contributing
@@ -1,6 +1,6 @@
1
1
  [tool.poetry]
2
2
  name = "openlit"
3
- version = "1.32.9"
3
+ version = "1.32.12"
4
4
  description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications and GPUs, facilitating the integration of observability into your GenAI-driven projects"
5
5
  authors = ["OpenLIT"]
6
6
  repository = "https://github.com/openlit/openlit/tree/main/openlit/python"
@@ -60,6 +60,9 @@ from openlit.instrumentation.phidata import PhidataInstrumentor
60
60
  from openlit.instrumentation.julep import JulepInstrumentor
61
61
  from openlit.instrumentation.ai21 import AI21Instrumentor
62
62
  from openlit.instrumentation.controlflow import ControlFlowInstrumentor
63
+ from openlit.instrumentation.crawl4ai import Crawl4AIInstrumentor
64
+ from openlit.instrumentation.firecrawl import FireCrawlInstrumentor
65
+ from openlit.instrumentation.letta import LettaInstrumentor
63
66
  from openlit.instrumentation.gpu import GPUInstrumentor
64
67
  import openlit.guard
65
68
  import openlit.evals
@@ -258,6 +261,9 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
258
261
  "ai21": "ai21",
259
262
  "controlflow": "controlflow",
260
263
  "assemblyai": "assemblyai",
264
+ "crawl4ai": "crawl4ai",
265
+ "firecrawl": "firecrawl",
266
+ "letta": "letta",
261
267
  }
262
268
 
263
269
  invalid_instrumentors = [
@@ -351,6 +357,9 @@ def init(environment="default", application_name="default", tracer=None, otlp_en
351
357
  "ai21": AI21Instrumentor(),
352
358
  "controlflow": ControlFlowInstrumentor(),
353
359
  "assemblyai": AssemblyAIInstrumentor(),
360
+ "crawl4ai": Crawl4AIInstrumentor(),
361
+ "firecrawl": FireCrawlInstrumentor(),
362
+ "letta": LettaInstrumentor(),
354
363
  }
355
364
 
356
365
  # Initialize and instrument only the enabled instrumentors
@@ -0,0 +1,52 @@
1
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
2
+ """Initializer of Auto Instrumentation of Crawl4AI Functions"""
3
+
4
+ from typing import Collection
5
+ import importlib.metadata
6
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
7
+ from wrapt import wrap_function_wrapper
8
+
9
+ from openlit.instrumentation.crawl4ai.crawl4ai import (
10
+ wrap_crawl
11
+ )
12
+ from openlit.instrumentation.crawl4ai.async_crawl4ai import (
13
+ async_wrap_crawl
14
+ )
15
+
16
+ _instruments = ("crawl4ai >= 0.4.0",)
17
+
18
+ class Crawl4AIInstrumentor(BaseInstrumentor):
19
+ """
20
+ An instrumentor for crawl4ai's client library.
21
+ """
22
+
23
+ def instrumentation_dependencies(self) -> Collection[str]:
24
+ return _instruments
25
+
26
+ def _instrument(self, **kwargs):
27
+ application_name = kwargs.get("application_name", "default_application")
28
+ environment = kwargs.get("environment", "default_environment")
29
+ tracer = kwargs.get("tracer")
30
+ metrics = kwargs.get("metrics_dict")
31
+ pricing_info = kwargs.get("pricing_info", {})
32
+ trace_content = kwargs.get("trace_content", False)
33
+ disable_metrics = kwargs.get("disable_metrics")
34
+ version = importlib.metadata.version("crawl4ai")
35
+
36
+ wrap_function_wrapper(
37
+ "crawl4ai.web_crawler",
38
+ "WebCrawler.run",
39
+ wrap_crawl("crawl4ai.web_crawl", version, environment, application_name,
40
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
41
+ )
42
+
43
+ wrap_function_wrapper(
44
+ "crawl4ai.async_webcrawler",
45
+ "AsyncWebCrawler.arun",
46
+ async_wrap_crawl("crawl4ai.web_crawl", version, environment, application_name,
47
+ tracer, pricing_info, trace_content, metrics, disable_metrics),
48
+ )
49
+
50
+ def _uninstrument(self, **kwargs):
51
+ # Proper uninstrumentation logic to revert patched methods
52
+ pass
@@ -0,0 +1,104 @@
1
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument
2
+ """
3
+ Module for monitoring Crawl4AI calls.
4
+ """
5
+
6
+ import logging
7
+ from opentelemetry.trace import SpanKind, Status, StatusCode
8
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
9
+ from openlit.__helpers import (
10
+ handle_exception,
11
+ )
12
+ from openlit.semcov import SemanticConvetion
13
+
14
+ # Initialize logger for logging potential issues and operations
15
+ logger = logging.getLogger(__name__)
16
+
17
+ def async_wrap_crawl(gen_ai_endpoint, version, environment, application_name,
18
+ tracer, pricing_info, trace_content, metrics, disable_metrics):
19
+ """
20
+ Generates a telemetry wrapper for chat completions to collect metrics.
21
+
22
+ Args:
23
+ gen_ai_endpoint: Endpoint identifier for logging and tracing.
24
+ version: Version of the monitoring package.
25
+ environment: Deployment environment (e.g., production, staging).
26
+ application_name: Name of the application using the Crawl4AI Agent.
27
+ tracer: OpenTelemetry tracer for creating spans.
28
+ pricing_info: Information used for calculating the cost of Crawl4AI usage.
29
+ trace_content: Flag indicating whether to trace the actual content.
30
+
31
+ Returns:
32
+ A function that wraps the chat completions method to add telemetry.
33
+ """
34
+
35
+ async def wrapper(wrapped, instance, args, kwargs):
36
+ """
37
+ Wraps the 'chat.completions' API call to add telemetry.
38
+
39
+ This collects metrics such as execution time, cost, and token usage, and handles errors
40
+ gracefully, adding details to the trace for observability.
41
+
42
+ Args:
43
+ wrapped: The original 'chat.completions' method to be wrapped.
44
+ instance: The instance of the class where the original method is defined.
45
+ args: Positional arguments for the 'chat.completions' method.
46
+ kwargs: Keyword arguments for the 'chat.completions' method.
47
+
48
+ Returns:
49
+ The response from the original 'chat.completions' method.
50
+ """
51
+
52
+ # pylint: disable=line-too-long
53
+ with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
54
+ response = await wrapped(*args, **kwargs)
55
+
56
+ try:
57
+ # Set base span attribues
58
+ span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
59
+ span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
60
+ SemanticConvetion.GEN_AI_SYSTEM_CRAWL4AI)
61
+ span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
62
+ SemanticConvetion.GEN_AI_TYPE_AGENT)
63
+ span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
64
+ gen_ai_endpoint)
65
+ span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
66
+ application_name)
67
+ span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
68
+ environment)
69
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
70
+ SemanticConvetion.GEN_AI_AGENT_TYPE_BROWSER)
71
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ENABLE_CACHE, not kwargs.get("disable_cache", False))
72
+
73
+ url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None
74
+ if url is not None:
75
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_BROWSE_URL, url)
76
+
77
+ extraction_strategy = kwargs.get("extraction_strategy", "NoExtractionStrategy")
78
+ extraction_name = extraction_strategy.name if hasattr(extraction_strategy, 'name') else extraction_strategy
79
+
80
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_STRATEGY, extraction_name)
81
+
82
+ if extraction_name == "LLMExtractionStrategy" and hasattr(extraction_strategy, 'provider'):
83
+ _, llm_model = extraction_strategy.provider.split('/')
84
+ span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, llm_model)
85
+
86
+ elif extraction_name == "CosineStrategy":
87
+ span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, "all-MiniLM-L6-v2")
88
+
89
+ elif extraction_name == "JsonCssExtractionStrategy" and hasattr(extraction_strategy, 'schema'):
90
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_SCHEMA, str(extraction_strategy.schema))
91
+
92
+ span.set_status(Status(StatusCode.OK))
93
+
94
+ # Return original response
95
+ return response
96
+
97
+ except Exception as e:
98
+ handle_exception(span, e)
99
+ logger.error("Error in trace creation: %s", e)
100
+
101
+ # Return original response
102
+ return response
103
+
104
+ return wrapper
@@ -0,0 +1,104 @@
1
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument
2
+ """
3
+ Module for monitoring Crawl4AI calls.
4
+ """
5
+
6
+ import logging
7
+ from opentelemetry.trace import SpanKind, Status, StatusCode
8
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
9
+ from openlit.__helpers import (
10
+ handle_exception,
11
+ )
12
+ from openlit.semcov import SemanticConvetion
13
+
14
+ # Initialize logger for logging potential issues and operations
15
+ logger = logging.getLogger(__name__)
16
+
17
+ def wrap_crawl(gen_ai_endpoint, version, environment, application_name,
18
+ tracer, pricing_info, trace_content, metrics, disable_metrics):
19
+ """
20
+ Generates a telemetry wrapper for chat completions to collect metrics.
21
+
22
+ Args:
23
+ gen_ai_endpoint: Endpoint identifier for logging and tracing.
24
+ version: Version of the monitoring package.
25
+ environment: Deployment environment (e.g., production, staging).
26
+ application_name: Name of the application using the Crawl4AI Agent.
27
+ tracer: OpenTelemetry tracer for creating spans.
28
+ pricing_info: Information used for calculating the cost of Crawl4AI usage.
29
+ trace_content: Flag indicating whether to trace the actual content.
30
+
31
+ Returns:
32
+ A function that wraps the chat completions method to add telemetry.
33
+ """
34
+
35
+ def wrapper(wrapped, instance, args, kwargs):
36
+ """
37
+ Wraps the 'chat.completions' API call to add telemetry.
38
+
39
+ This collects metrics such as execution time, cost, and token usage, and handles errors
40
+ gracefully, adding details to the trace for observability.
41
+
42
+ Args:
43
+ wrapped: The original 'chat.completions' method to be wrapped.
44
+ instance: The instance of the class where the original method is defined.
45
+ args: Positional arguments for the 'chat.completions' method.
46
+ kwargs: Keyword arguments for the 'chat.completions' method.
47
+
48
+ Returns:
49
+ The response from the original 'chat.completions' method.
50
+ """
51
+
52
+ # pylint: disable=line-too-long
53
+ with tracer.start_as_current_span(gen_ai_endpoint, kind= SpanKind.CLIENT) as span:
54
+ response = wrapped(*args, **kwargs)
55
+
56
+ try:
57
+ # Set base span attribues
58
+ span.set_attribute(TELEMETRY_SDK_NAME, "openlit")
59
+ span.set_attribute(SemanticConvetion.GEN_AI_SYSTEM,
60
+ SemanticConvetion.GEN_AI_SYSTEM_CRAWL4AI)
61
+ span.set_attribute(SemanticConvetion.GEN_AI_TYPE,
62
+ SemanticConvetion.GEN_AI_TYPE_AGENT)
63
+ span.set_attribute(SemanticConvetion.GEN_AI_ENDPOINT,
64
+ gen_ai_endpoint)
65
+ span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
66
+ application_name)
67
+ span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
68
+ environment)
69
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_TYPE,
70
+ SemanticConvetion.GEN_AI_AGENT_TYPE_BROWSER)
71
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ENABLE_CACHE, not kwargs.get("disable_cache", False))
72
+
73
+ url = kwargs.get("url") if "url" in kwargs else str(args[0]) if args else None
74
+ if url is not None:
75
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_BROWSE_URL, url)
76
+
77
+ extraction_strategy = kwargs.get("extraction_strategy", "NoExtractionStrategy")
78
+ extraction_name = extraction_strategy.name if hasattr(extraction_strategy, 'name') else extraction_strategy
79
+
80
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_STRATEGY, extraction_name)
81
+
82
+ if extraction_name == "LLMExtractionStrategy" and hasattr(extraction_strategy, 'provider'):
83
+ _, llm_model = extraction_strategy.provider.split('/')
84
+ span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, llm_model)
85
+
86
+ elif extraction_name == "CosineStrategy":
87
+ span.set_attribute(SemanticConvetion.GEN_AI_REQUEST_MODEL, "all-MiniLM-L6-v2")
88
+
89
+ elif extraction_name == "JsonCssExtractionStrategy" and hasattr(extraction_strategy, 'schema'):
90
+ span.set_attribute(SemanticConvetion.GEN_AI_AGENT_SCHEMA, str(extraction_strategy.schema))
91
+
92
+ span.set_status(Status(StatusCode.OK))
93
+
94
+ # Return original response
95
+ return response
96
+
97
+ except Exception as e:
98
+ handle_exception(span, e)
99
+ logger.error("Error in trace creation: %s", e)
100
+
101
+ # Return original response
102
+ return response
103
+
104
+ return wrapper
@@ -64,6 +64,8 @@ def dynamiq_wrap(gen_ai_endpoint, version, environment, application_name,
64
64
  gen_ai_endpoint)
65
65
  span.set_attribute(SemanticConvetion.GEN_AI_APPLICATION_NAME,
66
66
  application_name)
67
+ span.set_attribute(SemanticConvetion.GEN_AI_ENVIRONMENT,
68
+ environment)
67
69
 
68
70
  if gen_ai_endpoint == "dynamiq.agent_run":
69
71
  span.set_attribute(SemanticConvetion.GEN_AI_AGENT_ID,
@@ -0,0 +1,49 @@
1
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
2
+ """Initializer of Auto Instrumentation of FireCrawl Functions"""
3
+
4
+ from typing import Collection
5
+ import importlib.metadata
6
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
7
+ from wrapt import wrap_function_wrapper
8
+
9
+ from openlit.instrumentation.firecrawl.firecrawl import (
10
+ wrap_crawl
11
+ )
12
+
13
+ _instruments = ("firecrawl-py >= 1.6.3",)
14
+
15
class FireCrawlInstrumentor(BaseInstrumentor):
    """
    An instrumentor for firecrawl's client library.
    """

    def instrumentation_dependencies(self) -> Collection[str]:
        # Declare the package (and minimum version) this instrumentor targets.
        return _instruments

    def _instrument(self, **kwargs):
        # Pull configuration supplied by openlit.init(), falling back to defaults.
        app_name = kwargs.get("application_name", "default_application")
        env_name = kwargs.get("environment", "default_environment")
        tracer = kwargs.get("tracer")
        metrics_dict = kwargs.get("metrics_dict")
        pricing = kwargs.get("pricing_info", {})
        capture_content = kwargs.get("trace_content", False)
        no_metrics = kwargs.get("disable_metrics")
        pkg_version = importlib.metadata.version("firecrawl-py")

        # Patch both FirecrawlApp entry points with the same telemetry wrapper,
        # in the same order as before: scrape_url first, then crawl_url.
        for method_name, endpoint in (
            ("scrape_url", "firecrawl.scrape_url"),
            ("crawl_url", "firecrawl.crawl_url"),
        ):
            wrap_function_wrapper(
                "firecrawl.firecrawl",
                f"FirecrawlApp.{method_name}",
                wrap_crawl(endpoint, pkg_version, env_name, app_name,
                           tracer, pricing, capture_content, metrics_dict, no_metrics),
            )

    def _uninstrument(self, **kwargs):
        # Proper uninstrumentation logic to revert patched methods
        pass
@@ -0,0 +1,90 @@
1
+ # pylint: disable=duplicate-code, broad-exception-caught, too-many-statements, unused-argument
2
+ """
3
+ Module for monitoring FireCrawl calls.
4
+ """
5
+
6
+ import logging
7
+ from opentelemetry.trace import SpanKind, Status, StatusCode
8
+ from opentelemetry.sdk.resources import TELEMETRY_SDK_NAME
9
+ from openlit.__helpers import (
10
+ handle_exception,
11
+ )
12
+ from openlit.semcov import SemanticConvetion
13
+
14
+ # Initialize logger for logging potential issues and operations
15
+ logger = logging.getLogger(__name__)
16
+
17
def wrap_crawl(gen_ai_endpoint, version, environment, application_name,
               tracer, pricing_info, trace_content, metrics, disable_metrics):
    """
    Build a telemetry wrapper around a FireCrawl client method.

    Args:
        gen_ai_endpoint: Endpoint identifier for logging and tracing.
        version: Version of the monitoring package.
        environment: Deployment environment (e.g., production, staging).
        application_name: Name of the application using the FireCrawl Agent.
        tracer: OpenTelemetry tracer for creating spans.
        pricing_info: Information used for calculating the cost of FireCrawl usage.
        trace_content: Flag indicating whether to trace the actual content.
        metrics: Dictionary of metric instruments (not used by this wrapper).
        disable_metrics: Flag to disable metric collection (not used by this wrapper).

    Returns:
        A function that wraps the original method and records a span per call.
    """

    def wrapper(wrapped, instance, args, kwargs):
        """
        Invoke the wrapped FireCrawl call inside a CLIENT span and attach
        request metadata as span attributes.

        Exceptions raised by the wrapped call itself propagate unchanged;
        failures while recording telemetry are logged and never block the
        original response from being returned.
        """

        # pylint: disable=line-too-long
        with tracer.start_as_current_span(gen_ai_endpoint, kind=SpanKind.CLIENT) as span:
            response = wrapped(*args, **kwargs)

            try:
                # Attributes describing this instrumentation and the call site.
                for attr_key, attr_value in (
                    (TELEMETRY_SDK_NAME, "openlit"),
                    (SemanticConvetion.GEN_AI_SYSTEM,
                     SemanticConvetion.GEN_AI_SYSTEM_FIRECRAWL),
                    (SemanticConvetion.GEN_AI_TYPE,
                     SemanticConvetion.GEN_AI_TYPE_AGENT),
                    (SemanticConvetion.GEN_AI_ENDPOINT, gen_ai_endpoint),
                    (SemanticConvetion.GEN_AI_APPLICATION_NAME, application_name),
                    (SemanticConvetion.GEN_AI_ENVIRONMENT, environment),
                    (SemanticConvetion.GEN_AI_AGENT_TYPE,
                     SemanticConvetion.GEN_AI_AGENT_TYPE_BROWSER),
                    (SemanticConvetion.GEN_AI_AGENT_PARAMS,
                     str(kwargs.get("params"))),
                ):
                    span.set_attribute(attr_key, attr_value)

                # The target URL may arrive as a keyword or as the first
                # positional argument; record it only when present.
                if "url" in kwargs:
                    target_url = kwargs.get("url")
                elif args:
                    target_url = str(args[0])
                else:
                    target_url = None
                if target_url is not None:
                    span.set_attribute(SemanticConvetion.GEN_AI_AGENT_BROWSE_URL, target_url)

                span.set_status(Status(StatusCode.OK))

                # Return original response
                return response

            except Exception as e:
                # Never let a telemetry problem mask the real result.
                handle_exception(span, e)
                logger.error("Error in trace creation: %s", e)

                # Return original response
                return response

    return wrapper
@@ -0,0 +1,77 @@
1
+ # pylint: disable=useless-return, bad-staticmethod-argument, disable=duplicate-code
2
+ """Initializer of Auto Instrumentation of Letta Functions"""
3
+
4
+ from typing import Collection
5
+ import importlib.metadata
6
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
7
+ from wrapt import wrap_function_wrapper
8
+
9
+ from openlit.instrumentation.letta.letta import (
10
+ create_agent, send_message
11
+ )
12
+
13
+ _instruments = ("letta >= 0.6.2",)
14
+
15
class LettaInstrumentor(BaseInstrumentor):
    """
    An instrumentor for Letta's client library.
    """

    def instrumentation_dependencies(self) -> Collection[str]:
        # Declare the package (and minimum version) this instrumentor targets.
        return _instruments

    def _instrument(self, **kwargs):
        # Pull configuration supplied by openlit.init(), falling back to defaults.
        app_name = kwargs.get("application_name", "default_application")
        env_name = kwargs.get("environment", "default_environment")
        tracer = kwargs.get("tracer")
        metrics_dict = kwargs.get("metrics_dict")
        pricing = kwargs.get("pricing_info", {})
        capture_content = kwargs.get("trace_content", False)
        no_metrics = kwargs.get("disable_metrics")
        pkg_version = importlib.metadata.version("letta")

        # Both the local and REST clients expose the same surface; patch each
        # method on each client with the matching wrapper factory. Iteration
        # order matches the original registration order (LocalClient first).
        targets = (
            ("create_agent", "letta.create_agent", create_agent),
            ("get_agent", "letta.get_agent", create_agent),
            ("send_message", "letta.send_message", send_message),
        )
        for client_cls in ("LocalClient", "RESTClient"):
            for method_name, endpoint, factory in targets:
                wrap_function_wrapper(
                    "letta.client.client",
                    f"{client_cls}.{method_name}",
                    factory(endpoint, pkg_version, env_name, app_name,
                            tracer, pricing, capture_content, metrics_dict, no_metrics),
                )

    def _uninstrument(self, **kwargs):
        # Proper uninstrumentation logic to revert patched methods
        pass