langtrace-python-sdk 1.3.7__tar.gz → 2.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (128)
  1. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/PKG-INFO +14 -12
  2. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/README.md +12 -10
  3. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/pyproject.toml +1 -1
  4. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/anthropic_example/completion.py +6 -6
  5. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/cohere_example/chat.py +5 -4
  6. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/cohere_example/chat_stream.py +2 -4
  7. langtrace_python_sdk-1.3.7/src/examples/cohere_example/embed_create.py → langtrace_python_sdk-2.0.1/src/examples/cohere_example/embed.py +4 -3
  8. langtrace_python_sdk-2.0.1/src/examples/cohere_example/rerank.py +31 -0
  9. langtrace_python_sdk-2.0.1/src/examples/cohere_example/tools.py +40 -0
  10. langtrace_python_sdk-2.0.1/src/examples/langchain_example/groq_example.py +30 -0
  11. langtrace_python_sdk-2.0.1/src/examples/langchain_example/langgraph_example.py +85 -0
  12. langtrace_python_sdk-2.0.1/src/examples/openai_example/chat_completion.py +41 -0
  13. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/embeddings_create.py +3 -2
  14. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/function_calling.py +3 -5
  15. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/images_generate.py +1 -1
  16. langtrace_python_sdk-2.0.1/src/examples/openai_example/tool_calling.py +67 -0
  17. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/tool_calling_nonstreaming.py +2 -1
  18. langtrace_python_sdk-2.0.1/src/examples/qdrant_example/basic.py +50 -0
  19. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/__init__.py +20 -0
  20. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/instrumentation/cohere.py +6 -1
  21. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/instrumentation/common.py +3 -0
  22. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/constants/instrumentation/groq.py +8 -0
  23. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/constants/instrumentation/qdrant.py +72 -0
  24. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py +16 -4
  25. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +26 -6
  26. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py +14 -2
  27. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/chroma/patch.py +16 -4
  28. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/cohere/instrumentation.py +24 -7
  29. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/cohere/patch.py +597 -0
  30. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/groq/instrumentation.py +56 -0
  31. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/groq/patch.py +578 -0
  32. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py +14 -3
  33. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/langchain/patch.py +16 -4
  34. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py +15 -2
  35. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py +20 -3
  36. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py +14 -4
  37. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py +19 -7
  38. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/langgraph/instrumentation.py +65 -0
  39. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/langgraph/patch.py +113 -0
  40. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +15 -11
  41. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py +20 -10
  42. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py +20 -9
  43. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/openai/patch.py +112 -78
  44. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py +14 -3
  45. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py +17 -4
  46. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/qdrant/__init__.py +0 -0
  47. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/qdrant/instrumentation.py +55 -0
  48. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/qdrant/patch.py +72 -0
  49. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/langtrace.py +52 -35
  50. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/utils/__init__.py +0 -0
  51. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/utils/llm.py +17 -4
  52. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/utils/with_root_span.py +21 -5
  53. langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/version.py +1 -0
  54. langtrace_python_sdk-2.0.1/src/run_example.py +56 -0
  55. langtrace_python_sdk-2.0.1/src/tests/__init__.py +0 -0
  56. langtrace_python_sdk-2.0.1/src/tests/anthropic/cassettes/test_anthropic.yaml +85 -0
  57. langtrace_python_sdk-2.0.1/src/tests/anthropic/cassettes/test_anthropic_streaming.yaml +456 -0
  58. langtrace_python_sdk-2.0.1/src/tests/anthropic/cassettes/test_async_anthropic_streaming.yaml +328 -0
  59. langtrace_python_sdk-2.0.1/src/tests/anthropic/conftest.py +38 -0
  60. langtrace_python_sdk-2.0.1/src/tests/anthropic/test_anthropic.py +109 -0
  61. langtrace_python_sdk-2.0.1/src/tests/conftest.py +17 -0
  62. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/conftest.py +5 -13
  63. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/test_chat_completion.py +21 -0
  64. langtrace_python_sdk-2.0.1/src/tests/openai/test_embeddings.py +0 -0
  65. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/test_image_generation.py +20 -8
  66. langtrace_python_sdk-1.3.7/src/examples/openai/chat_completion.py +0 -58
  67. langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/__init__.py +0 -7
  68. langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/cohere/patch.py +0 -397
  69. langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/version.py +0 -1
  70. langtrace_python_sdk-1.3.7/src/run_example.py +0 -44
  71. langtrace_python_sdk-1.3.7/src/tests/anthropic/test_anthropic.py +0 -73
  72. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/.gitignore +0 -0
  73. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/LICENSE +0 -0
  74. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/__init__.py +0 -0
  75. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/__init__.py +0 -0
  76. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/anthropic_example/__init__.py +0 -0
  77. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/chroma_example/__init__.py +0 -0
  78. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/chroma_example/basic.py +0 -0
  79. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/cohere_example/__init__.py +0 -0
  80. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/fastapi_example/basic_route.py +0 -0
  81. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/hiveagent_example/basic.py +0 -0
  82. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/langchain_example/__init__.py +0 -0
  83. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/langchain_example/basic.py +0 -0
  84. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/langchain_example/tool.py +0 -0
  85. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/llamaindex_example/__init__.py +0 -0
  86. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/llamaindex_example/agent.py +0 -0
  87. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/llamaindex_example/basic.py +0 -0
  88. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/llamaindex_example/data/abramov.txt +0 -0
  89. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/__init__.py +0 -0
  90. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/async_tool_calling_nonstreaming.py +0 -0
  91. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/async_tool_calling_streaming.py +0 -0
  92. {langtrace_python_sdk-1.3.7/src/examples/openai → langtrace_python_sdk-2.0.1/src/examples/openai_example}/tool_calling_streaming.py +0 -0
  93. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/perplexity_example/basic.py +0 -0
  94. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/pinecone_example/__init__.py +0 -0
  95. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/examples/pinecone_example/basic.py +0 -0
  96. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/constants → langtrace_python_sdk-2.0.1/src/examples/qdrant_example}/__init__.py +0 -0
  97. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/constants/instrumentation → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/constants}/__init__.py +0 -0
  98. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/exporter/langtrace_exporter.py +0 -0
  99. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/extensions → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/constants/instrumentation}/__init__.py +0 -0
  100. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/instrumentation/anthropic.py +0 -0
  101. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/instrumentation/chroma.py +0 -0
  102. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/instrumentation/openai.py +0 -0
  103. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py +0 -0
  104. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/extensions}/__init__.py +0 -0
  105. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/langtrace_python_sdk/extensions/langtrace_exporter.py +0 -0
  106. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/anthropic → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation}/__init__.py +0 -0
  107. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/chroma → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/anthropic}/__init__.py +0 -0
  108. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/cohere → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/chroma}/__init__.py +0 -0
  109. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/langchain → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/cohere}/__init__.py +0 -0
  110. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/langchain_community → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/groq}/__init__.py +0 -0
  111. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/langchain_core → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/langchain}/__init__.py +0 -0
  112. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/llamaindex → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/langchain_community}/__init__.py +0 -0
  113. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/openai → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/langchain_core}/__init__.py +0 -0
  114. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/instrumentation/pinecone → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/langgraph}/__init__.py +0 -0
  115. {langtrace_python_sdk-1.3.7/src/langtrace_python_sdk/utils → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/llamaindex}/__init__.py +0 -0
  116. {langtrace_python_sdk-1.3.7/src/tests → langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/openai}/__init__.py +0 -0
  117. /langtrace_python_sdk-1.3.7/src/tests/openai/test_embeddings.py → /langtrace_python_sdk-2.0.1/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py +0 -0
  118. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/chroma/test_chroma.py +0 -0
  119. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/langchain/test_langchain.py +0 -0
  120. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/langchain/test_langchain_community.py +0 -0
  121. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/langchain/test_langchain_core.py +0 -0
  122. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/cassettes/test_async_chat_completion_streaming.yaml +0 -0
  123. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/cassettes/test_async_image_generation.yaml +0 -0
  124. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/cassettes/test_chat_completion.yaml +0 -0
  125. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/cassettes/test_chat_completion_streaming.yaml +0 -0
  126. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/openai/cassettes/test_image_generation.yaml +0 -0
  127. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/pinecone/test_pinecone.py +0 -0
  128. {langtrace_python_sdk-1.3.7 → langtrace_python_sdk-2.0.1}/src/tests/utils.py +0 -0
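Across the example changes below, the 1.3.x initialization flags (batch, log_spans_to_console, write_to_remote_url) are consistently replaced by a single write_to_langtrace_cloud flag, and 2.0.1 re-exports langtrace and with_langtrace_root_span from the package root. A minimal usage sketch pieced together from those examples, not an official snippet (assumes the relevant vendor key, e.g. OPENAI_API_KEY, is set in the environment):

    from openai import OpenAI

    from langtrace_python_sdk import langtrace, with_langtrace_root_span

    # 2.0.x style: one flag instead of batch/log_spans_to_console/write_to_remote_url
    langtrace.init(write_to_langtrace_cloud=False)

    client = OpenAI()


    @with_langtrace_root_span("chat")
    def chat():
        # LLM calls made inside this function are captured under the "chat" root span
        return client.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[{"role": "user", "content": "Tell me a story in 3 sentences or less."}],
        )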
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: langtrace-python-sdk
- Version: 1.3.7
+ Version: 2.0.1
  Summary: Python SDK for LangTrace
  Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
  Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -15,7 +15,7 @@ Requires-Dist: opentelemetry-instrumentation
  Requires-Dist: opentelemetry-sdk
  Requires-Dist: pinecone-client
  Requires-Dist: tiktoken
- Requires-Dist: trace-attributes==1.0.32
+ Requires-Dist: trace-attributes==3.0.5
  Provides-Extra: dev
  Requires-Dist: anthropic; extra == 'dev'
  Requires-Dist: chromadb; extra == 'dev'
@@ -221,16 +221,18 @@ def chat_completion():

  Langtrace automatically captures traces from the following vendors:

- | Vendor | Type | Typescript SDK | Python SDK
- | ------ | ------ | ------ | ------ |
- | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
- | Cohere | LLM | :x: | :white_check_mark: |
- | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Langchain | Framework | :x: | :white_check_mark: |
- | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
- | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
- | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
+ | Vendor | Type | Typescript SDK | Python SDK |
+ | ------------ | --------------- | ------------------ | ------------------ |
+ | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
+ | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Cohere | LLM | :white_check_mark: | :white_check_mark: |
+ | Groq | LLM | :x: | :white_check_mark: |
+ | Langchain | Framework | :x: | :white_check_mark: |
+ | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
+ | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
+ | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
+ | QDrant | Vector Database | :x: | :white_check_mark: |

  ---

@@ -188,16 +188,18 @@ def chat_completion():

  Langtrace automatically captures traces from the following vendors:

- | Vendor | Type | Typescript SDK | Python SDK
- | ------ | ------ | ------ | ------ |
- | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
- | Cohere | LLM | :x: | :white_check_mark: |
- | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Langchain | Framework | :x: | :white_check_mark: |
- | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
- | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
- | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
+ | Vendor | Type | Typescript SDK | Python SDK |
+ | ------------ | --------------- | ------------------ | ------------------ |
+ | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
+ | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Cohere | LLM | :white_check_mark: | :white_check_mark: |
+ | Groq | LLM | :x: | :white_check_mark: |
+ | Langchain | Framework | :x: | :white_check_mark: |
+ | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
+ | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
+ | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
+ | QDrant | Vector Database | :x: | :white_check_mark: |

  ---

@@ -18,7 +18,7 @@ classifiers=[
  "Operating System :: OS Independent",
  ]
  dependencies = [
- 'trace-attributes==1.0.32',
+ 'trace-attributes==3.0.5',
  'opentelemetry-api',
  'opentelemetry-sdk',
  'opentelemetry-instrumentation',
@@ -3,13 +3,13 @@
  import anthropic
  from dotenv import find_dotenv, load_dotenv

- from langtrace_python_sdk import langtrace
- from langtrace_python_sdk import with_langtrace_root_span
+ from langtrace_python_sdk import langtrace, with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

  langtrace.init(write_to_langtrace_cloud=False)

+
  @with_langtrace_root_span("messages_create")
  def messages_create():

@@ -21,10 +21,10 @@ def messages_create():
  temperature=0.0,
  system="Respond only in Yoda-speak.",
  messages=[{"role": "user", "content": "How are you today?"}],
- stream=False,
+ stream=True,
  )

- print(message)
+ # print(message)

- # for response in message:
- # pass
+ for response in message:
+ pass
@@ -1,7 +1,8 @@
- from dotenv import find_dotenv, load_dotenv
  import cohere
+ from dotenv import find_dotenv, load_dotenv

  from langtrace_python_sdk import langtrace
+
  # from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())
@@ -18,9 +19,9 @@ def chat_comp():
  {"role": "USER", "message": "Who discovered gravity?"},
  {"role": "CHATBOT", "message": "The man who is widely credited with discovering gravity is Sir Isaac Newton"}
  ],
- message="What is today's news?",
- # preamble="answer like yoda",
+ message="Tell me a story in 3 sentences or less?",
+ preamble="answer like a pirate",
  # perform web search before answering the question. You can also use your own custom connector.
- # connectors=[{"id": "web-search"}]
+ connectors=[{"id": "web-search"}]
  )
  print(response)
@@ -1,10 +1,8 @@
- from dotenv import find_dotenv, load_dotenv
  import cohere
+ from dotenv import find_dotenv, load_dotenv

  from langtrace_python_sdk import langtrace

- # from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
-
  _ = load_dotenv(find_dotenv())

  langtrace.init(write_to_langtrace_cloud=False)
@@ -15,7 +13,7 @@ co = cohere.Client()
  # @with_langtrace_root_span("chat_stream")
  def chat_stream():
  result = []
- for event in co.chat_stream(message="Tell me a short story in 2 lines"):
+ for event in co.chat_stream(message="Tell me a short story in 2 lines", preamble="Respond like a pirate", max_tokens=100):
  if event.event_type == "text-generation":
  result.append(event.text)
  elif event.event_type == "stream-end":
@@ -1,5 +1,5 @@
- from dotenv import find_dotenv, load_dotenv
  import cohere
+ from dotenv import find_dotenv, load_dotenv

  from langtrace_python_sdk import langtrace

@@ -13,10 +13,11 @@ co = cohere.Client()


  # @with_langtrace_root_span("embed_create")
- def embed_create():
+ def embed():
  response = co.embed(
  texts=["hello", "goodbye"],
  model="embed-english-v3.0",
  input_type="classification",
  )
- # print(response)
+ print(response)
+ return response
@@ -0,0 +1,31 @@
+ import cohere
+ from dotenv import find_dotenv, load_dotenv
+
+ from langtrace_python_sdk import langtrace
+
+ # from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(write_to_langtrace_cloud=False)
+
+ co = cohere.Client()
+
+
+ # @with_langtrace_root_span("embed_create")
+ def rerank():
+ docs = [
+ "Carson City is the capital city of the American state of Nevada.",
+ "The Commonwealth of the Northern Mariana Islands is a group of islands in the Pacific Ocean. Its capital is Saipan.",
+ "Washington, D.C. (also known as simply Washington or D.C., and officially as the District of Columbia) is the capital of the United States. It is a federal district.",
+ "Capital punishment (the death penalty) has existed in the United States since beforethe United States was a country. As of 2017, capital punishment is legal in 30 of the 50 states.",
+ ]
+
+ response = co.rerank(
+ model="rerank-english-v2.0",
+ query="What is the capital of the United States?",
+ documents=docs,
+ top_n=3,
+ )
+ print(response)
+ return response
@@ -0,0 +1,40 @@
+ # import json
+
+ import cohere
+ from dotenv import find_dotenv, load_dotenv
+
+ from langtrace_python_sdk import langtrace
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(write_to_langtrace_cloud=False)
+
+ co = cohere.Client()
+
+
+ student_custom_functions = [
+ {
+ "name": "extract_student_info",
+ "description": "Get the student information from the body of the input text",
+ "parameter_definitions": {
+ "name": {"type": "string", "description": "Name of the person", "required": True},
+ "major": {"type": "string", "description": "Major subject.", "required": True},
+ "school": {"type": "string", "description": "The university name.", "required": True},
+ "grades": {"type": "integer", "description": "GPA of the student.", "required": True},
+ "club": {
+ "type": "string",
+ "description": "School club for extracurricular activities. ",
+ "required": False,
+ },
+ },
+ }
+ ]
+
+
+ def tool_calling():
+ response = co.chat(
+ message="John is a grad student in computer science at Stanford University. He is an American and has a 3.8 GPA. John is known for his programming skills and is an active member of the university's Robotics Club. He hopes to pursue a career in artificial intelligence after graduating.",
+ tools=student_custom_functions,
+ )
+ print(response)
+ return response
@@ -0,0 +1,30 @@
+ from dotenv import find_dotenv, load_dotenv
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_groq import ChatGroq
+
+ _ = load_dotenv(find_dotenv())
+
+ from langtrace_python_sdk import langtrace
+
+ # from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init()
+
+
+ def groq_example():
+
+ chat = ChatGroq(temperature=0, model_name="mixtral-8x7b-32768")
+
+ system = "You are a helpful assistant."
+ human = "{text}"
+ prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])
+
+ chain = prompt | chat
+ result = chain.invoke({"text": "Explain the importance of low latency LLMs in 2 sentences or less."})
+ # print(result)
+ return result
+
+
+ groq_example()
@@ -0,0 +1,85 @@
+ import json
+
+ from dotenv import find_dotenv, load_dotenv
+ from langchain_core.messages import HumanMessage, ToolMessage
+ from langchain_core.tools import tool
+ from langchain_core.utils.function_calling import convert_to_openai_tool
+ from langchain_openai import ChatOpenAI
+ from langgraph.graph import END, MessageGraph
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init()
+
+
+ @tool
+ def multiply(first_number: int, second_number: int):
+ """Multiplies two numbers together."""
+ return first_number * second_number
+
+
+ model = ChatOpenAI(temperature=0)
+ model_with_tools = model.bind(tools=[convert_to_openai_tool(multiply)])
+
+
+ def invoke_model(state):
+ return model_with_tools.invoke(state)
+
+
+ def router(state):
+ tool_calls = state[-1].additional_kwargs.get("tool_calls", [])
+ if len(tool_calls):
+ return "multiply"
+ else:
+ return "end"
+
+
+ def invoke_tool(state):
+ tool_calls = state[-1].additional_kwargs.get("tool_calls", [])
+ multiply_call = None
+
+ for tool_call in tool_calls:
+ if tool_call.get("function").get("name") == "multiply":
+ multiply_call = tool_call
+
+ if multiply_call is None:
+ raise Exception("No adder input found.")
+
+ res = multiply.invoke(
+ json.loads(multiply_call.get("function").get("arguments"))
+ )
+
+ return ToolMessage(
+ tool_call_id=multiply_call.get("id"),
+ content=res
+ )
+
+
+ @with_langtrace_root_span('langgraph_example')
+ def basic():
+
+ graph = MessageGraph()
+
+ graph.add_node("oracle", invoke_model)
+
+ graph.add_node("multiply", invoke_tool)
+
+ graph.add_conditional_edges("oracle", router, {
+ "multiply": "multiply",
+ "end": END,
+ })
+
+ graph.add_edge("multiply", END)
+
+ graph.set_entry_point("oracle")
+
+ runnable = graph.compile()
+
+ answer = runnable.invoke(HumanMessage("What is 1 + 1?"))
+ print(answer)
+
+
+ basic()
@@ -0,0 +1,41 @@
+ from dotenv import find_dotenv, load_dotenv
+ from openai import OpenAI
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import (
+ with_additional_attributes, with_langtrace_root_span)
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(write_to_langtrace_cloud=False)
+ client = OpenAI()
+
+
+ # @with_additional_attributes({"user.id": "1234", "user.feedback.rating": 1})
+ def api():
+ response = client.chat.completions.create(
+ model="gpt-4",
+ messages=[{"role": "system", "content": "Talk like a pirate"}, {"role": "user", "content": "Tell me a story in 3 sentences or less."}],
+ stream=True,
+ # stream=False,
+ )
+ return response
+
+
+ # @with_langtrace_root_span()
+ def chat_completion():
+ response = api()
+ # print(response)
+ # Uncomment this for streaming
+ result = []
+ for chunk in response:
+ if chunk.choices[0].delta.content is not None:
+ content = [
+ choice.delta.content if choice.delta and
+ choice.delta.content else ""
+ for choice in chunk.choices]
+ result.append(
+ content[0] if len(content) > 0 else "")
+
+ print("".join(result))
+ return response
@@ -6,7 +6,7 @@ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

- langtrace.init(batch=True, log_spans_to_console=True, write_to_remote_url=False)
+ langtrace.init(write_to_langtrace_cloud=False)
  client = OpenAI()


@@ -14,5 +14,6 @@ client = OpenAI()
  def embeddings_create():
  result = client.embeddings.create(
  model="text-embedding-ada-002",
- input="Once upon a time, there was a frog.",
+ input="Once upon a time, there was a pirate.",
  )
+ return result
@@ -1,10 +1,9 @@
- import json
+ # import json

  from dotenv import find_dotenv, load_dotenv
  from openai import OpenAI

  from langtrace_python_sdk import langtrace
- from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

@@ -34,20 +33,19 @@ student_custom_functions = [
  ]


- # @with_langtrace_root_span()
  def function_calling():
  response = client.chat.completions.create(
  model="gpt-3.5-turbo",
  messages=[
  {
  "role": "user",
- "content": "David Nguyen is a sophomore majoring in computer science at Stanford University. He is Asian American and has a 3.8 GPA. David is known for his programming skills and is an active member of the university's Robotics Club. He hopes to pursue a career in artificial intelligence after graduating.",
+ "content": "John is a grad student in computer science at Stanford University. He is an American and has a 3.8 GPA. John is known for his programming skills and is an active member of the university's Robotics Club. He hopes to pursue a career in artificial intelligence after graduating.",
  }
  ],
  functions=student_custom_functions,
- function_call="auto",
  stream=True,
  )
+ # return response

  result = []
  for chunk in response:
@@ -6,7 +6,7 @@ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

- langtrace.init(batch=True, log_spans_to_console=True, write_to_remote_url=False)
+ langtrace.init(write_to_langtrace_cloud=False)

  client = OpenAI()

@@ -0,0 +1,67 @@
+ # import json
+
+ from dotenv import find_dotenv, load_dotenv
+ from openai import OpenAI
+
+ from langtrace_python_sdk import langtrace
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(write_to_langtrace_cloud=False)
+
+ client = OpenAI()
+
+
+ student_custom_functions = [
+ {
+ "type": "function",
+ "function": {
+ "name": "extract_student_info",
+ "description": "Get the student information from the body of the input text",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "name": {"type": "string", "description": "Name of the person"},
+ "major": {"type": "string", "description": "Major subject."},
+ "school": {"type": "string", "description": "The university name."},
+ "grades": {"type": "integer", "description": "GPA of the student."},
+ "club": {
+ "type": "string",
+ "description": "School club for extracurricular activities. ",
+ },
+ },
+ },
+ },
+ }
+ ]
+
+
+ def tool_calling():
+ response = client.chat.completions.create(
+ model="gpt-3.5-turbo",
+ messages=[
+ {
+ "role": "user",
+ "content": "John is a grad student in computer science at Stanford University. He is an American and has a 3.8 GPA. John is known for his programming skills and is an active member of the university's Robotics Club. He hopes to pursue a career in artificial intelligence after graduating.",
+ }
+ ],
+ tools=student_custom_functions,
+ stream=False,
+ )
+ return response
+
+ # result = []
+ # for chunk in response:
+ # if chunk.choices[0].delta.function_call is not None:
+ # content = [
+ # choice.delta.function_call.arguments if choice.delta.function_call and
+ # choice.delta.function_call.arguments else ""
+ # for choice in chunk.choices]
+ # result.append(
+ # content[0] if len(content) > 0 else "")
+
+ # print("".join(result))
+
+ # Loading the response as a JSON object
+ # json_response = json.loads(response.choices[0].message.function_call.arguments)
+ # print(json_response)
@@ -13,6 +13,7 @@ langtrace.init(write_to_langtrace_cloud=False)

  client = OpenAI()

+
  # Example dummy function hard coded to return the same weather
  # In production, this could be your backend API or an external API
  def get_current_weather(location, unit="fahrenheit"):
@@ -89,4 +90,4 @@ def run_conversation():
  messages=messages,
  ) # get a new response from the model where it can see the function response
  # print(second_response)
- return second_response
+ return second_response
@@ -0,0 +1,50 @@
+ import cohere
+ from dotenv import find_dotenv, load_dotenv
+ from qdrant_client import QdrantClient
+ from qdrant_client.models import Batch, Distance, VectorParams
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(write_to_langtrace_cloud=False)
+
+
+ @with_langtrace_root_span()
+ def basic():
+ client = QdrantClient(":memory:")
+ cohere_client = cohere.Client()
+
+ client.create_collection(collection_name="MyCollection4", vectors_config=VectorParams(
+ size=1024,
+ distance=Distance.COSINE,
+ ))
+
+ client.upsert(
+ collection_name="MyCollection4",
+ points=Batch(
+ ids=[1],
+ vectors=cohere_client.embed(
+ model="embed-english-v3.0", # New Embed v3 model
+ input_type="search_document", # Input type for documents
+ texts=["Qdrant is the a vector database written in Rust"],
+ ).embeddings,
+ ),
+ )
+
+ answer = client.search(
+ collection_name="MyCollection4",
+ query_vector=cohere_client.embed(
+ model="embed-english-v3.0", # New Embed v3 model
+ input_type="search_query", # Input type for search queries
+ texts=["Which database is written in Rust?"],
+ ).embeddings[0],
+
+ )
+ print(answer[0])
+
+ return answer
+
+
+ basic()
@@ -0,0 +1,20 @@
+ """
+ Copyright (c) 2024 Scale3 Labs
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ """
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ __all__ = ["langtrace", "with_langtrace_root_span"]
@@ -4,7 +4,7 @@ APIS = {
  "METHOD": "cohere.client.chat",
  "ENDPOINT": "/v1/chat",
  },
- "EMBED_CREATE": {
+ "EMBED": {
  "URL": "https://api.cohere.ai",
  "METHOD": "cohere.client.embed",
  "ENDPOINT": "/v1/embed",
@@ -14,4 +14,9 @@ APIS = {
  "METHOD": "cohere.client.chat_stream",
  "ENDPOINT": "/v1/messages",
  },
+ "RERANK": {
+ "URL": "https://api.cohere.ai",
+ "METHOD": "cohere.client.rerank",
+ "ENDPOINT": "/v1/rerank",
+ },
  }
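The new RERANK entry maps the cohere.client.rerank method to the /v1/rerank endpoint, matching the rerank example added earlier in this diff. A condensed sketch of the call that this constant covers, adapted (not copied verbatim) from that example and assuming a Cohere API key is available in the environment:

    import cohere

    from langtrace_python_sdk import langtrace

    langtrace.init(write_to_langtrace_cloud=False)
    co = cohere.Client()

    # Recorded by the Cohere instrumentation as a cohere.client.rerank call to /v1/rerank
    response = co.rerank(
        model="rerank-english-v2.0",
        query="What is the capital of the United States?",
        documents=[
            "Carson City is the capital city of the American state of Nevada.",
            "Washington, D.C. is the capital of the United States.",
        ],
        top_n=1,
    )
    print(response)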