langtrace-python-sdk 2.0.0__tar.gz → 2.0.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/PKG-INFO +14 -12
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/README.md +12 -10
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/pyproject.toml +1 -1
- langtrace_python_sdk-2.0.2/src/examples/langchain_example/groq_example.py +30 -0
- langtrace_python_sdk-2.0.2/src/examples/langchain_example/langgraph_example.py +85 -0
- langtrace_python_sdk-2.0.2/src/examples/qdrant_example/basic.py +50 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/common.py +3 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/constants/instrumentation/groq.py +8 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/constants/instrumentation/qdrant.py +72 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/cohere/patch.py +34 -26
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/groq/instrumentation.py +56 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/groq/patch.py +578 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/langgraph/instrumentation.py +65 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/langgraph/patch.py +113 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/qdrant/__init__.py +0 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/qdrant/instrumentation.py +55 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/qdrant/patch.py +72 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/langtrace.py +12 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/utils/__init__.py +0 -0
- langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/version.py +1 -0
- langtrace_python_sdk-2.0.2/src/tests/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/anthropic/conftest.py +0 -3
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/anthropic/test_anthropic.py +6 -29
- langtrace_python_sdk-2.0.2/src/tests/cohere/cassettes/test_cohere_chat.yaml +91 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/cassettes/test_cohere_chat_streaming.yaml +275 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/cassettes/test_cohere_embed.yaml +73 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/cassettes/test_cohere_rerank.yaml +76 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/conftest.py +30 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/test_cohere_chat.py +116 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/test_cohere_embed.py +35 -0
- langtrace_python_sdk-2.0.2/src/tests/cohere/test_cohere_rerank.py +52 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/conftest.py +5 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/conftest.py +0 -3
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/test_chat_completion.py +19 -30
- langtrace_python_sdk-2.0.2/src/tests/openai/test_embeddings.py +0 -0
- langtrace_python_sdk-2.0.2/src/tests/utils.py +44 -0
- langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/version.py +0 -1
- langtrace_python_sdk-2.0.0/src/tests/utils.py +0 -21
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/.gitignore +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/LICENSE +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/anthropic_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/anthropic_example/completion.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/chroma_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/chroma_example/basic.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/cohere_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/cohere_example/chat.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/cohere_example/chat_stream.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/cohere_example/embed.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/cohere_example/rerank.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/cohere_example/tools.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/fastapi_example/basic_route.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/hiveagent_example/basic.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/langchain_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/langchain_example/basic.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/langchain_example/tool.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/llamaindex_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/llamaindex_example/agent.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/llamaindex_example/basic.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/llamaindex_example/data/abramov.txt +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/async_tool_calling_nonstreaming.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/async_tool_calling_streaming.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/chat_completion.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/embeddings_create.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/function_calling.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/images_generate.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/tool_calling.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/tool_calling_nonstreaming.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/openai_example/tool_calling_streaming.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/perplexity_example/basic.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/pinecone_example/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/examples/pinecone_example/basic.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/constants → langtrace_python_sdk-2.0.2/src/examples/qdrant_example}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/constants/instrumentation → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/constants}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/exporter/langtrace_exporter.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/extensions → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/constants/instrumentation}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/anthropic.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/chroma.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/cohere.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/openai.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/extensions}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/extensions/langtrace_exporter.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/anthropic → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/chroma → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/anthropic}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/cohere → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/chroma}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/chroma/patch.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/langchain → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/cohere}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/cohere/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/langchain_community → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/groq}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/langchain_core → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/langchain}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/langchain/patch.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/llamaindex → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/langchain_community}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/openai → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/langchain_core}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/instrumentation/pinecone → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/langgraph}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0/src/langtrace_python_sdk/utils → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/llamaindex}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py +0 -0
- {langtrace_python_sdk-2.0.0/src/tests → langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/openai}/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/openai/patch.py +0 -0
- /langtrace_python_sdk-2.0.0/src/tests/openai/test_embeddings.py → /langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/utils/llm.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/utils/with_root_span.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/run_example.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/anthropic/cassettes/test_anthropic.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/anthropic/cassettes/test_anthropic_streaming.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/anthropic/cassettes/test_async_anthropic_streaming.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/chroma/test_chroma.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/langchain/test_langchain.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/langchain/test_langchain_community.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/langchain/test_langchain_core.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/cassettes/test_async_chat_completion_streaming.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/cassettes/test_async_image_generation.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/cassettes/test_chat_completion.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/cassettes/test_chat_completion_streaming.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/cassettes/test_image_generation.yaml +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/openai/test_image_generation.py +0 -0
- {langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/tests/pinecone/test_pinecone.py +0 -0
{langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: langtrace-python-sdk
-Version: 2.0.0
+Version: 2.0.2
 Summary: Python SDK for LangTrace
 Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
 Author-email: Scale3 Labs <engineering@scale3labs.com>

@@ -15,7 +15,7 @@ Requires-Dist: opentelemetry-instrumentation
 Requires-Dist: opentelemetry-sdk
 Requires-Dist: pinecone-client
 Requires-Dist: tiktoken
-Requires-Dist: trace-attributes==3.0.
+Requires-Dist: trace-attributes==3.0.5
 Provides-Extra: dev
 Requires-Dist: anthropic; extra == 'dev'
 Requires-Dist: chromadb; extra == 'dev'

@@ -221,16 +221,18 @@ def chat_completion():
 
 Langtrace automatically captures traces from the following vendors:
 
-| Vendor
-
-| OpenAI
-| Anthropic
-
-
-
-
-
-
+| Vendor       | Type            | Typescript SDK     | Python SDK         |
+| ------------ | --------------- | ------------------ | ------------------ |
+| OpenAI       | LLM             | :white_check_mark: | :white_check_mark: |
+| Anthropic    | LLM             | :white_check_mark: | :white_check_mark: |
+| Azure OpenAI | LLM             | :white_check_mark: | :white_check_mark: |
+| Cohere       | LLM             | :white_check_mark: | :white_check_mark: |
+| Groq         | LLM             | :x:                | :white_check_mark: |
+| Langchain    | Framework       | :x:                | :white_check_mark: |
+| LlamaIndex   | Framework       | :white_check_mark: | :white_check_mark: |
+| Pinecone     | Vector Database | :white_check_mark: | :white_check_mark: |
+| ChromaDB     | Vector Database | :white_check_mark: | :white_check_mark: |
+| QDrant       | Vector Database | :x:                | :white_check_mark: |
 
 ---
 
{langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/README.md

@@ -188,16 +188,18 @@ def chat_completion():
 
 Langtrace automatically captures traces from the following vendors:
 
-| Vendor
-
-| OpenAI
-| Anthropic
-
-
-
-
-
-
+| Vendor       | Type            | Typescript SDK     | Python SDK         |
+| ------------ | --------------- | ------------------ | ------------------ |
+| OpenAI       | LLM             | :white_check_mark: | :white_check_mark: |
+| Anthropic    | LLM             | :white_check_mark: | :white_check_mark: |
+| Azure OpenAI | LLM             | :white_check_mark: | :white_check_mark: |
+| Cohere       | LLM             | :white_check_mark: | :white_check_mark: |
+| Groq         | LLM             | :x:                | :white_check_mark: |
+| Langchain    | Framework       | :x:                | :white_check_mark: |
+| LlamaIndex   | Framework       | :white_check_mark: | :white_check_mark: |
+| Pinecone     | Vector Database | :white_check_mark: | :white_check_mark: |
+| ChromaDB     | Vector Database | :white_check_mark: | :white_check_mark: |
+| QDrant       | Vector Database | :x:                | :white_check_mark: |
 
 ---
 
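Note: the rows added above mark Groq and Qdrant as traced by the Python SDK. A minimal sketch of what that looks like in practice, following the init-then-call ordering used by the example scripts added in this release (the model name and prompt are illustrative, and `GROQ_API_KEY` is assumed to be set in the environment):

```python
# Sketch only: initialize Langtrace before using a newly supported vendor so the
# vendor's client library is patched. Assumes GROQ_API_KEY is set in the environment.
from langtrace_python_sdk import langtrace

langtrace.init()

from groq import Groq  # groq >= 0.5.0, matching the new instrumentation's dependency check

client = Groq()
completion = client.chat.completions.create(
    model="mixtral-8x7b-32768",  # illustrative; same model the added groq example uses
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(completion.choices[0].message.content)
```

The same pattern applies to the other vendors in the table: call `langtrace.init()` once, then use the vendor client as usual.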
langtrace_python_sdk-2.0.2/src/examples/langchain_example/groq_example.py

@@ -0,0 +1,30 @@
+from dotenv import find_dotenv, load_dotenv
+from langchain_core.prompts import ChatPromptTemplate
+from langchain_groq import ChatGroq
+
+_ = load_dotenv(find_dotenv())
+
+from langtrace_python_sdk import langtrace
+
+# from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init()
+
+
+def groq_example():
+
+    chat = ChatGroq(temperature=0, model_name="mixtral-8x7b-32768")
+
+    system = "You are a helpful assistant."
+    human = "{text}"
+    prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])
+
+    chain = prompt | chat
+    result = chain.invoke({"text": "Explain the importance of low latency LLMs in 2 sentences or less."})
+    # print(result)
+    return result
+
+
+groq_example()
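The example above imports `with_langtrace_root_span` but leaves it commented out. The other examples added in this release use that decorator to group a script's spans under one named root span; a sketch of applying it here (the wrapper function name is made up for illustration):

```python
# Sketch only: wrap the example's entry point in a named root span, the way the
# langgraph and qdrant examples in this release do. "groq_example" is just a label.
from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span


@with_langtrace_root_span("groq_example")
def traced_groq_example():
    return groq_example()  # groq_example as defined in the new file above
```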
langtrace_python_sdk-2.0.2/src/examples/langchain_example/langgraph_example.py

@@ -0,0 +1,85 @@
+import json
+
+from dotenv import find_dotenv, load_dotenv
+from langchain_core.messages import HumanMessage, ToolMessage
+from langchain_core.tools import tool
+from langchain_core.utils.function_calling import convert_to_openai_tool
+from langchain_openai import ChatOpenAI
+from langgraph.graph import END, MessageGraph
+
+from langtrace_python_sdk import langtrace
+from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init()
+
+
+@tool
+def multiply(first_number: int, second_number: int):
+    """Multiplies two numbers together."""
+    return first_number * second_number
+
+
+model = ChatOpenAI(temperature=0)
+model_with_tools = model.bind(tools=[convert_to_openai_tool(multiply)])
+
+
+def invoke_model(state):
+    return model_with_tools.invoke(state)
+
+
+def router(state):
+    tool_calls = state[-1].additional_kwargs.get("tool_calls", [])
+    if len(tool_calls):
+        return "multiply"
+    else:
+        return "end"
+
+
+def invoke_tool(state):
+    tool_calls = state[-1].additional_kwargs.get("tool_calls", [])
+    multiply_call = None
+
+    for tool_call in tool_calls:
+        if tool_call.get("function").get("name") == "multiply":
+            multiply_call = tool_call
+
+    if multiply_call is None:
+        raise Exception("No adder input found.")
+
+    res = multiply.invoke(
+        json.loads(multiply_call.get("function").get("arguments"))
+    )
+
+    return ToolMessage(
+        tool_call_id=multiply_call.get("id"),
+        content=res
+    )
+
+
+@with_langtrace_root_span('langgraph_example')
+def basic():
+
+    graph = MessageGraph()
+
+    graph.add_node("oracle", invoke_model)
+
+    graph.add_node("multiply", invoke_tool)
+
+    graph.add_conditional_edges("oracle", router, {
+        "multiply": "multiply",
+        "end": END,
+    })
+
+    graph.add_edge("multiply", END)
+
+    graph.set_entry_point("oracle")
+
+    runnable = graph.compile()
+
+    answer = runnable.invoke(HumanMessage("What is 1 + 1?"))
+    print(answer)
+
+
+basic()
langtrace_python_sdk-2.0.2/src/examples/qdrant_example/basic.py

@@ -0,0 +1,50 @@
+import cohere
+from dotenv import find_dotenv, load_dotenv
+from qdrant_client import QdrantClient
+from qdrant_client.models import Batch, Distance, VectorParams
+
+from langtrace_python_sdk import langtrace
+from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init(write_to_langtrace_cloud=False)
+
+
+@with_langtrace_root_span()
+def basic():
+    client = QdrantClient(":memory:")
+    cohere_client = cohere.Client()
+
+    client.create_collection(collection_name="MyCollection4", vectors_config=VectorParams(
+        size=1024,
+        distance=Distance.COSINE,
+    ))
+
+    client.upsert(
+        collection_name="MyCollection4",
+        points=Batch(
+            ids=[1],
+            vectors=cohere_client.embed(
+                model="embed-english-v3.0",  # New Embed v3 model
+                input_type="search_document",  # Input type for documents
+                texts=["Qdrant is the a vector database written in Rust"],
+            ).embeddings,
+        ),
+    )
+
+    answer = client.search(
+        collection_name="MyCollection4",
+        query_vector=cohere_client.embed(
+            model="embed-english-v3.0",  # New Embed v3 model
+            input_type="search_query",  # Input type for search queries
+            texts=["Which database is written in Rust?"],
+        ).embeddings[0],
+
+    )
+    print(answer[0])
+
+    return answer
+
+
+basic()
{langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/constants/instrumentation/common.py

@@ -10,14 +10,17 @@ SERVICE_PROVIDERS = {
     "ANTHROPIC": "Anthropic",
     "AZURE": "Azure",
     "CHROMA": "Chroma",
+    "GROQ": "Groq",
     "LANGCHAIN": "Langchain",
     "LANGCHAIN_COMMUNITY": "Langchain Community",
     "LANGCHAIN_CORE": "Langchain Core",
+    "LANGGRAPH": "Langgraph",
     "LLAMAINDEX": "LlamaIndex",
     "OPENAI": "OpenAI",
     "PINECONE": "Pinecone",
     "COHERE": "Cohere",
     "PPLX": "Perplexity",
+    "QDRANT": "Qdrant",
 }
 
 LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY = "langtrace_additional_attributes"
langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/constants/instrumentation/qdrant.py

@@ -0,0 +1,72 @@
+from langtrace.trace_attributes import QdrantDBMethods
+
+APIS = {
+    "ADD": {
+        "METHOD": QdrantDBMethods.ADD.value,
+        "OPERATION": "add",
+    },
+    "GET_COLLECTION": {
+        "METHOD": QdrantDBMethods.GET_COLLECTION.value,
+        "OPERATION": "get_collection",
+    },
+    "GET_COLLECTIONS": {
+        "METHOD": QdrantDBMethods.GET_COLLECTIONS.value,
+        "OPERATION": "get_collections",
+    },
+    "QUERY": {
+        "METHOD": QdrantDBMethods.QUERY.value,
+        "OPERATION": "query",
+    },
+    "QUERY_BATCH": {
+        "METHOD": QdrantDBMethods.QUERY_BATCH.value,
+        "OPERATION": "query_batch",
+    },
+    "DELETE": {
+        "METHOD": QdrantDBMethods.DELETE.value,
+        "OPERATION": "delete",
+    },
+    "DISCOVER": {
+        "METHOD": QdrantDBMethods.DISCOVER.value,
+        "OPERATION": "discover",
+    },
+    "DISCOVER_BATCH": {
+        "METHOD": QdrantDBMethods.DISCOVER_BATCH.value,
+        "OPERATION": "discover_batch",
+    },
+    "RECOMMEND": {
+        "METHOD": QdrantDBMethods.RECOMMEND.value,
+        "OPERATION": "recommend",
+    },
+    "RECOMMEND_BATCH": {
+        "METHOD": QdrantDBMethods.RECOMMEND_BATCH.value,
+        "OPERATION": "recommend_batch",
+    },
+    "RETRIEVE": {
+        "METHOD": QdrantDBMethods.RETRIEVE.value,
+        "OPERATION": "retrieve",
+    },
+    "SEARCH": {
+        "METHOD": QdrantDBMethods.SEARCH.value,
+        "OPERATION": "search",
+    },
+    "SEARCH_BATCH": {
+        "METHOD": QdrantDBMethods.SEARCH_BATCH.value,
+        "OPERATION": "search_batch",
+    },
+    "UPSERT": {
+        "METHOD": QdrantDBMethods.UPSERT.value,
+        "OPERATION": "upsert",
+    },
+    "COUNT": {
+        "METHOD": QdrantDBMethods.COUNT.value,
+        "OPERATION": "count",
+    },
+    "UPDATE_COLLECTION": {
+        "METHOD": QdrantDBMethods.UPDATE_COLLECTION.value,
+        "OPERATION": "update_collection",
+    },
+    "UPDATE_VECTORS": {
+        "METHOD": QdrantDBMethods.UPDATE_VECTORS.value,
+        "OPERATION": "update_vectors",
+    },
+}
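The qdrant patch module that consumes this table (`src/langtrace_python_sdk/instrumentation/qdrant/patch.py`, +72 lines in the file list) is not reproduced in this section. As an illustration only, a wrapt-style wrapper would typically look up an entry and use `METHOD` for the span name and `OPERATION` for an attribute; the attribute keys below are assumptions, not the SDK's actual ones:

```python
# Illustration only: how a METHOD/OPERATION table like APIS is commonly consumed
# by a wrapt-style patch. The span attribute keys here are assumptions.
from opentelemetry.trace import SpanKind

from langtrace_python_sdk.constants.instrumentation.qdrant import APIS


def generic_patch(api_name, tracer):
    api = APIS[api_name]  # e.g. APIS["UPSERT"] from the constants added above

    def traced_method(wrapped, instance, args, kwargs):
        # METHOD names the span; OPERATION records which qdrant-client call ran.
        with tracer.start_as_current_span(api["METHOD"], kind=SpanKind.CLIENT) as span:
            span.set_attribute("db.system", "qdrant")
            span.set_attribute("db.operation", api["OPERATION"])
            return wrapped(*args, **kwargs)

    return traced_method
```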
{langtrace_python_sdk-2.0.0 → langtrace_python_sdk-2.0.2}/src/langtrace_python_sdk/instrumentation/cohere/patch.py

@@ -23,7 +23,9 @@ from opentelemetry.trace.status import Status, StatusCode
 
 from langtrace_python_sdk.constants.instrumentation.cohere import APIS
 from langtrace_python_sdk.constants.instrumentation.common import (
-    LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
+    LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
+    SERVICE_PROVIDERS,
+)
 
 
 def rerank(original_method, version, tracer):

@@ -103,9 +105,7 @@ def rerank(original_method, version, tracer):
                 else 0
             ),
         }
-        span.set_attribute(
-            "llm.token.counts", json.dumps(usage_dict)
-        )
+        span.set_attribute("llm.token.counts", json.dumps(usage_dict))
 
         span.set_status(StatusCode.OK)
         span.end()

@@ -187,9 +187,7 @@ def embed(original_method, version, tracer):
                 else 0
             ),
         }
-        span.set_attribute(
-            "llm.token.counts", json.dumps(usage_dict)
-        )
+        span.set_attribute("llm.token.counts", json.dumps(usage_dict))
 
         span.set_status(StatusCode.OK)
         span.end()

@@ -226,9 +224,7 @@ def chat_create(original_method, version, tracer):
                 item.get("role") if item.get("role") is not None else "USER"
             ),
             "content": (
-                item.get("message")
-                if item.get("message") is not None
-                else ""
+                item.get("message") if item.get("message") is not None else ""
             ),
         }
         for item in chat_history

@@ -312,7 +308,11 @@ def chat_create(original_method, version, tracer):
         span.set_attribute("llm.is_search_required", result.is_search_required)
 
     if kwargs.get("stream") is False or kwargs.get("stream") is None:
-        if
+        if (
+            hasattr(result, "text")
+            and result.text is not None
+            and result.text != ""
+        ):
             if (
                 hasattr(result, "chat_history")
                 and result.chat_history is not None

@@ -321,8 +321,7 @@ def chat_create(original_method, version, tracer):
                 {
                     "role": (
                         item.role
-                        if hasattr(item, "role")
-                        and item.role is not None
+                        if hasattr(item, "role") and item.role is not None
                         else "USER"
                     ),
                     "content": (

@@ -336,9 +335,7 @@ def chat_create(original_method, version, tracer):
                 ]
                 span.set_attribute("llm.responses", json.dumps(responses))
             else:
-                responses = [
-                    {"role": "CHATBOT", "content": result.text}
-                ]
+                responses = [{"role": "CHATBOT", "content": result.text}]
                 span.set_attribute("llm.responses", json.dumps(responses))
         elif hasattr(result, "tool_calls") and result.tool_calls is not None:
             tool_calls = []

@@ -422,9 +419,7 @@ def chat_stream(original_method, version, tracer):
                 item.get("role") if item.get("role") is not None else "USER"
             ),
             "content": (
-                item.get("message")
-                if item.get("message") is not None
-                else ""
+                item.get("message") if item.get("message") is not None else ""
            ),
         }
         for item in chat_history

@@ -485,7 +480,7 @@ def chat_stream(original_method, version, tracer):
     # stringify the list of objects
     attributes.llm_tool_results = json.dumps(kwargs.get("tool_results"))
 
-    span = tracer.start_span(APIS["
+    span = tracer.start_span(APIS["CHAT_STREAM"]["METHOD"], kind=SpanKind.CLIENT)
     for field, value in attributes.model_dump(by_alias=True).items():
         if value is not None:
             span.set_attribute(field, value)

@@ -503,19 +498,28 @@ def chat_stream(original_method, version, tracer):
                 Event.STREAM_OUTPUT.value, {"response": "".join(content)}
             )
 
-            if
+            if (
+                hasattr(event, "finish_reason")
+                and event.finish_reason == "COMPLETE"
+            ):
                 response = event.response
 
                 if (hasattr(response, "generation_id")) and (
                     response.generation_id is not None
                 ):
-                    span.set_attribute(
-
+                    span.set_attribute(
+                        "llm.generation_id", response.generation_id
+                    )
+                if (hasattr(response, "response_id")) and (
+                    response.response_id is not None
+                ):
                     span.set_attribute("llm.response_id", response.response_id)
                 if (hasattr(response, "is_search_required")) and (
                     response.is_search_required is not None
                 ):
-                    span.set_attribute(
+                    span.set_attribute(
+                        "llm.is_search_required", response.is_search_required
+                    )
 
                 # Set the response attributes
                 if hasattr(response, "text") and response.text is not None:

@@ -540,12 +544,16 @@ def chat_stream(original_method, version, tracer):
                         }
                         for item in response.chat_history
                     ]
-                    span.set_attribute(
+                    span.set_attribute(
+                        "llm.responses", json.dumps(responses)
+                    )
                 else:
                     responses = [
                         {"role": "CHATBOT", "content": response.text}
                     ]
-                    span.set_attribute(
+                    span.set_attribute(
+                        "llm.responses", json.dumps(responses)
+                    )
 
                 # Get the usage
                 if hasattr(response, "meta") and response.meta is not None:
langtrace_python_sdk-2.0.2/src/langtrace_python_sdk/instrumentation/groq/instrumentation.py

@@ -0,0 +1,56 @@
+"""
+Copyright (c) 2024 Scale3 Labs
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+
+import importlib.metadata
+import logging
+from typing import Collection
+
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.trace import get_tracer
+from wrapt import wrap_function_wrapper
+
+from langtrace_python_sdk.instrumentation.groq.patch import (
+    async_chat_completions_create, chat_completions_create)
+
+logging.basicConfig(level=logging.FATAL)
+
+
+class GroqInstrumentation(BaseInstrumentor):
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return ["groq >= 0.5.0"]
+
+    def _instrument(self, **kwargs):
+        tracer_provider = kwargs.get("tracer_provider")
+        tracer = get_tracer(__name__, "", tracer_provider)
+        version = importlib.metadata.version("groq")
+
+        wrap_function_wrapper(
+            "groq.resources.chat.completions",
+            "Completions.create",
+            chat_completions_create("groq.chat.completions.create", version, tracer),
+        )
+
+        wrap_function_wrapper(
+            "groq.resources.chat.completions",
+            "AsyncCompletions.create",
+            async_chat_completions_create(
+                "groq.chat.completions.create_stream", version, tracer
+            ),
+        )
+
+    def _uninstrument(self, **kwargs):
+        pass