langtrace-python-sdk 2.1.28__tar.gz → 2.2.1__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (199)
  1. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/PKG-INFO +26 -19
  2. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/README.md +21 -17
  3. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/pyproject.toml +5 -2
  4. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/chat.py +1 -0
  5. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/chat_stream.py +3 -0
  6. langtrace_python_sdk-2.2.1/src/examples/gemini_example/__init__.py +6 -0
  7. langtrace_python_sdk-2.2.1/src/examples/gemini_example/function_tools.py +62 -0
  8. langtrace_python_sdk-2.2.1/src/examples/gemini_example/main.py +91 -0
  9. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/langchain_example/__init__.py +8 -0
  10. langtrace_python_sdk-2.2.1/src/examples/langchain_example/groq_example.py +45 -0
  11. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/ollama_example/basic.py +1 -0
  12. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/__init__.py +1 -0
  13. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/async_tool_calling_nonstreaming.py +1 -1
  14. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/chat_completion.py +1 -1
  15. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/embeddings_create.py +1 -0
  16. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/images_edit.py +2 -2
  17. langtrace_python_sdk-2.2.1/src/examples/vertexai_example/__init__.py +6 -0
  18. langtrace_python_sdk-2.2.1/src/examples/vertexai_example/main.py +214 -0
  19. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/common.py +2 -0
  20. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/constants/instrumentation/gemini.py +12 -0
  21. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/constants/instrumentation/vertexai.py +42 -0
  22. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/__init__.py +4 -0
  23. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +179 -0
  24. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/chroma/patch.py +29 -29
  25. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/cohere/patch.py +143 -242
  26. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/gemini/__init__.py +3 -0
  27. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/gemini/instrumentation.py +36 -0
  28. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/gemini/patch.py +186 -0
  29. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/groq/patch.py +82 -125
  30. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/ollama/patch.py +62 -65
  31. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/openai/patch.py +651 -0
  32. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/qdrant/patch.py +6 -6
  33. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/vertexai/__init__.py +3 -0
  34. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/vertexai/instrumentation.py +33 -0
  35. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/instrumentation/vertexai/patch.py +131 -0
  36. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/langtrace.py +7 -1
  37. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/utils/__init__.py +24 -0
  38. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/utils/llm.py +384 -0
  39. langtrace_python_sdk-2.2.1/src/langtrace_python_sdk/version.py +1 -0
  40. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/run_example.py +22 -1
  41. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/anthropic/test_anthropic.py +28 -27
  42. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/test_cohere_chat.py +36 -36
  43. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/test_cohere_embed.py +12 -9
  44. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/test_cohere_rerank.py +18 -11
  45. langtrace_python_sdk-2.2.1/src/tests/groq/cassettes/test_async_chat_completion.yaml +113 -0
  46. langtrace_python_sdk-2.2.1/src/tests/groq/cassettes/test_async_chat_completion_streaming.yaml +2232 -0
  47. langtrace_python_sdk-2.2.1/src/tests/groq/cassettes/test_chat_completion.yaml +114 -0
  48. langtrace_python_sdk-2.2.1/src/tests/groq/cassettes/test_chat_completion_streaming.yaml +2512 -0
  49. langtrace_python_sdk-2.2.1/src/tests/groq/conftest.py +33 -0
  50. langtrace_python_sdk-2.2.1/src/tests/groq/test_groq.py +142 -0
  51. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/cassettes/test_async_chat_completion_streaming.yaml +28 -28
  52. langtrace_python_sdk-2.2.1/src/tests/openai/test_chat_completion.py +132 -0
  53. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/test_image_generation.py +47 -24
  54. langtrace_python_sdk-2.2.1/src/tests/utils.py +79 -0
  55. langtrace_python_sdk-2.1.28/src/examples/langchain_example/groq_example.py +0 -32
  56. langtrace_python_sdk-2.1.28/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +0 -207
  57. langtrace_python_sdk-2.1.28/src/langtrace_python_sdk/instrumentation/openai/patch.py +0 -955
  58. langtrace_python_sdk-2.1.28/src/langtrace_python_sdk/utils/__init__.py +0 -13
  59. langtrace_python_sdk-2.1.28/src/langtrace_python_sdk/utils/llm.py +0 -79
  60. langtrace_python_sdk-2.1.28/src/langtrace_python_sdk/version.py +0 -1
  61. langtrace_python_sdk-2.1.28/src/tests/openai/test_chat_completion.py +0 -146
  62. langtrace_python_sdk-2.1.28/src/tests/utils.py +0 -44
  63. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/.gitignore +0 -0
  64. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/LICENSE +0 -0
  65. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/__init__.py +0 -0
  66. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/__init__.py +0 -0
  67. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/anthropic_example/__init__.py +0 -0
  68. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/anthropic_example/completion.py +0 -0
  69. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/chroma_example/__init__.py +0 -0
  70. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/chroma_example/basic.py +0 -0
  71. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/__init__.py +0 -0
  72. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/embed.py +0 -0
  73. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/rerank.py +0 -0
  74. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/tools.py +0 -0
  75. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/crewai_example/basic.py +0 -0
  76. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/dspy_example/math_problems_cot.py +0 -0
  77. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/dspy_example/program_of_thought_basic.py +0 -0
  78. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/dspy_example/quiz_gen.py +0 -0
  79. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/dspy_example/react.py +0 -0
  80. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/fastapi_example/__init__.py +0 -0
  81. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/fastapi_example/basic_route.py +0 -0
  82. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/hiveagent_example/basic.py +0 -0
  83. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/inspect_ai_example/basic_eval.py +0 -0
  84. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/langchain_example/basic.py +0 -0
  85. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/langchain_example/langgraph_example.py +0 -0
  86. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/langchain_example/tool.py +0 -0
  87. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/llamaindex_example/__init__.py +0 -0
  88. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/llamaindex_example/agent.py +0 -0
  89. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/llamaindex_example/basic.py +0 -0
  90. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/llamaindex_example/data/abramov.txt +0 -0
  91. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/ollama_example/__init__.py +0 -0
  92. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/async_tool_calling_streaming.py +0 -0
  93. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/function_calling.py +0 -0
  94. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/images_generate.py +0 -0
  95. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/resources/lounge_flamingo.png +0 -0
  96. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/resources/mask.png +0 -0
  97. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/tool_calling.py +0 -0
  98. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/tool_calling_nonstreaming.py +0 -0
  99. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/tool_calling_streaming.py +0 -0
  100. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/perplexity_example/basic.py +0 -0
  101. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/pinecone_example/__init__.py +0 -0
  102. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/pinecone_example/basic.py +0 -0
  103. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/qdrant_example/__init__.py +0 -0
  104. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/qdrant_example/basic.py +0 -0
  105. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/weaviate_example/__init__.py +0 -0
  106. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/weaviate_example/query_text.py +0 -0
  107. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/__init__.py +0 -0
  108. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/__init__.py +0 -0
  109. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/exporter/langtrace_exporter.py +0 -0
  110. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/__init__.py +0 -0
  111. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/anthropic.py +0 -0
  112. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/chroma.py +0 -0
  113. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/cohere.py +0 -0
  114. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/groq.py +0 -0
  115. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/ollama.py +0 -0
  116. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/openai.py +0 -0
  117. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py +0 -0
  118. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/qdrant.py +0 -0
  119. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/constants/instrumentation/weaviate.py +0 -0
  120. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/extensions/__init__.py +0 -0
  121. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/extensions/langtrace_exporter.py +0 -0
  122. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/extensions/langtrace_filesystem.py +0 -0
  123. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/anthropic/__init__.py +0 -0
  124. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py +0 -0
  125. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/chroma/__init__.py +0 -0
  126. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py +0 -0
  127. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/cohere/__init__.py +0 -0
  128. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/cohere/instrumentation.py +0 -0
  129. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/crewai/__init__.py +0 -0
  130. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/crewai/instrumentation.py +0 -0
  131. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/crewai/patch.py +0 -0
  132. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/dspy/__init__.py +0 -0
  133. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/dspy/instrumentation.py +0 -0
  134. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/dspy/patch.py +0 -0
  135. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/groq/__init__.py +0 -0
  136. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/groq/instrumentation.py +0 -0
  137. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain/__init__.py +0 -0
  138. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py +0 -0
  139. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain/patch.py +0 -0
  140. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain_community/__init__.py +0 -0
  141. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py +0 -0
  142. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py +0 -0
  143. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain_core/__init__.py +0 -0
  144. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py +0 -0
  145. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py +0 -0
  146. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langgraph/__init__.py +0 -0
  147. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langgraph/instrumentation.py +0 -0
  148. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/langgraph/patch.py +0 -0
  149. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/llamaindex/__init__.py +0 -0
  150. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +0 -0
  151. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py +0 -0
  152. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/ollama/__init__.py +0 -0
  153. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/ollama/instrumentation.py +0 -0
  154. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/openai/__init__.py +0 -0
  155. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py +0 -0
  156. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py +0 -0
  157. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py +0 -0
  158. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py +0 -0
  159. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/qdrant/__init__.py +0 -0
  160. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/qdrant/instrumentation.py +0 -0
  161. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/weaviate/__init__.py +0 -0
  162. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/weaviate/instrumentation.py +0 -0
  163. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/instrumentation/weaviate/patch.py +0 -0
  164. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/types/__init__.py +0 -0
  165. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/langtrace_sampler.py +0 -0
  166. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/misc.py +0 -0
  167. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/prompt_registry.py +0 -0
  168. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/sdk_version_checker.py +0 -0
  169. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/silently_fail.py +0 -0
  170. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/types.py +0 -0
  171. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/langtrace_python_sdk/utils/with_root_span.py +0 -0
  172. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/__init__.py +0 -0
  173. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/anthropic/cassettes/test_anthropic.yaml +0 -0
  174. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/anthropic/cassettes/test_anthropic_streaming.yaml +0 -0
  175. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/anthropic/cassettes/test_async_anthropic_streaming.yaml +0 -0
  176. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/anthropic/conftest.py +0 -0
  177. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/chroma/conftest.py +0 -0
  178. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/chroma/test_chroma.py +0 -0
  179. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/cassettes/test_cohere_chat.yaml +0 -0
  180. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/cassettes/test_cohere_chat_streaming.yaml +0 -0
  181. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/cassettes/test_cohere_embed.yaml +0 -0
  182. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/cassettes/test_cohere_rerank.yaml +0 -0
  183. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/cohere/conftest.py +0 -0
  184. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/conftest.py +0 -0
  185. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/langchain/cassettes/test_langchain.yaml +0 -0
  186. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/langchain/conftest.py +0 -0
  187. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/langchain/test_langchain.py +0 -0
  188. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/cassettes/test_async_image_generation.yaml +0 -0
  189. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/cassettes/test_chat_completion.yaml +0 -0
  190. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/cassettes/test_chat_completion_streaming.yaml +0 -0
  191. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/cassettes/test_image_generation.yaml +0 -0
  192. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/conftest.py +0 -0
  193. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/openai/test_embeddings.py +0 -0
  194. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/pinecone/cassettes/test_query.yaml +0 -0
  195. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/pinecone/cassettes/test_upsert.yaml +0 -0
  196. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/pinecone/conftest.py +0 -0
  197. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/pinecone/test_pinecone.py +0 -0
  198. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/qdrant/conftest.py +0 -0
  199. {langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/tests/qdrant/test_qdrant.py +0 -0

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: langtrace-python-sdk
- Version: 2.1.28
+ Version: 2.2.1
  Summary: Python SDK for LangTrace
  Project-URL: Homepage, https://github.com/Scale3-Labs/langtrace-python-sdk
  Author-email: Scale3 Labs <engineering@scale3labs.com>
@@ -20,11 +20,14 @@ Requires-Dist: opentelemetry-instrumentation>=0.46b0
  Requires-Dist: opentelemetry-sdk>=1.25.0
  Requires-Dist: sqlalchemy
  Requires-Dist: tiktoken>=0.1.1
- Requires-Dist: trace-attributes<6.0.0,>=5.0.0
+ Requires-Dist: trace-attributes<7.0.0,>=6.0.0
  Provides-Extra: dev
  Requires-Dist: anthropic; extra == 'dev'
  Requires-Dist: chromadb; extra == 'dev'
  Requires-Dist: cohere; extra == 'dev'
+ Requires-Dist: google-cloud-aiplatform; extra == 'dev'
+ Requires-Dist: google-generativeai; extra == 'dev'
+ Requires-Dist: groq; extra == 'dev'
  Requires-Dist: langchain; extra == 'dev'
  Requires-Dist: langchain-community; extra == 'dev'
  Requires-Dist: langchain-openai; extra == 'dev'
@@ -274,23 +277,27 @@ prompt = get_prompt_from_registry(<Registry ID>, options={"prompt_version": 1, "

  Langtrace automatically captures traces from the following vendors:

- | Vendor | Type | Typescript SDK | Python SDK |
- | ------------ | --------------- | ------------------ | ------------------ |
- | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
- | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Cohere | LLM | :white_check_mark: | :white_check_mark: |
- | Groq | LLM | :x: | :white_check_mark: |
- | Langchain | Framework | :x: | :white_check_mark: |
- | Langgraph | Framework | :x: | :white_check_mark: |
- | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
- | DSPy | Framework | :x: | :white_check_mark: |
- | CrewAI | Framework | :x: | :white_check_mark: |
- | Ollama | Framework | :x: | :white_check_mark: |
- | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
- | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
- | Weaviate | Vector Database | :white_check_mark: | :white_check_mark: |
- | QDrant | Vector Database | :x: | :white_check_mark: |
+ | Vendor | Type | Typescript SDK | Python SDK |
+ | ------------ | --------------- | ------------------ | ------------------------------- |
+ | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
+ | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Cohere | LLM | :white_check_mark: | :white_check_mark: |
+ | Groq | LLM | :x: | :white_check_mark: |
+ | Perplexity | LLM | :white_check_mark: | :white_check_mark: |
+ | Gemini | LLM | :x: | :white_check_mark: |
+ | Langchain | Framework | :x: | :white_check_mark: |
+ | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
+ | Langgraph | Framework | :x: | :white_check_mark: |
+ | DSPy | Framework | :x: | :white_check_mark: |
+ | CrewAI | Framework | :x: | :white_check_mark: |
+ | Ollama | Framework | :x: | :white_check_mark: |
+ | VertexAI | Framework | :x: | :white_check_mark: |
+ | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
+ | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
+ | QDrant | Vector Database | :white_check_mark: | :white_check_mark: |
+ | Weaviate | Vector Database | :white_check_mark: | :white_check_mark: |
+ | PGVector | Vector Database | :white_check_mark: | :white_check_mark: (SQLAlchemy) |

  ---


{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/README.md

@@ -232,23 +232,27 @@ prompt = get_prompt_from_registry(<Registry ID>, options={"prompt_version": 1, "

  Langtrace automatically captures traces from the following vendors:

- | Vendor | Type | Typescript SDK | Python SDK |
- | ------------ | --------------- | ------------------ | ------------------ |
- | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
- | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
- | Cohere | LLM | :white_check_mark: | :white_check_mark: |
- | Groq | LLM | :x: | :white_check_mark: |
- | Langchain | Framework | :x: | :white_check_mark: |
- | Langgraph | Framework | :x: | :white_check_mark: |
- | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
- | DSPy | Framework | :x: | :white_check_mark: |
- | CrewAI | Framework | :x: | :white_check_mark: |
- | Ollama | Framework | :x: | :white_check_mark: |
- | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
- | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
- | Weaviate | Vector Database | :white_check_mark: | :white_check_mark: |
- | QDrant | Vector Database | :x: | :white_check_mark: |
+ | Vendor | Type | Typescript SDK | Python SDK |
+ | ------------ | --------------- | ------------------ | ------------------------------- |
+ | OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Anthropic | LLM | :white_check_mark: | :white_check_mark: |
+ | Azure OpenAI | LLM | :white_check_mark: | :white_check_mark: |
+ | Cohere | LLM | :white_check_mark: | :white_check_mark: |
+ | Groq | LLM | :x: | :white_check_mark: |
+ | Perplexity | LLM | :white_check_mark: | :white_check_mark: |
+ | Gemini | LLM | :x: | :white_check_mark: |
+ | Langchain | Framework | :x: | :white_check_mark: |
+ | LlamaIndex | Framework | :white_check_mark: | :white_check_mark: |
+ | Langgraph | Framework | :x: | :white_check_mark: |
+ | DSPy | Framework | :x: | :white_check_mark: |
+ | CrewAI | Framework | :x: | :white_check_mark: |
+ | Ollama | Framework | :x: | :white_check_mark: |
+ | VertexAI | Framework | :x: | :white_check_mark: |
+ | Pinecone | Vector Database | :white_check_mark: | :white_check_mark: |
+ | ChromaDB | Vector Database | :white_check_mark: | :white_check_mark: |
+ | QDrant | Vector Database | :white_check_mark: | :white_check_mark: |
+ | Weaviate | Vector Database | :white_check_mark: | :white_check_mark: |
+ | PGVector | Vector Database | :white_check_mark: | :white_check_mark: (SQLAlchemy) |

  ---

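The table above is the version added to the README in 2.2.1; Gemini and VertexAI are the newly auto-instrumented Python integrations. Judging from the example files added in this release, auto-capture needs nothing beyond calling `langtrace.init()` before the vendor client is used. A minimal sketch for the new Gemini support, adapted from the `src/examples/gemini_example/main.py` shown further below (it assumes the `google-generativeai` package is installed and a `GEMINI_API_KEY` environment variable is set):

```python
import os

import google.generativeai as genai
from langtrace_python_sdk import langtrace

# Initialize Langtrace before creating the Gemini client so the new
# instrumentation is patched in; spans are printed to the console here.
langtrace.init(write_spans_to_console=True, batch=False)
genai.configure(api_key=os.environ["GEMINI_API_KEY"])

model = genai.GenerativeModel("gemini-1.5-pro")
response = model.generate_content("Write a one-line story about AI and magic.")
print(response.text)  # the generate_content call is captured as a span
```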

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/pyproject.toml

@@ -18,7 +18,7 @@ classifiers=[
  "Operating System :: OS Independent",
  ]
  dependencies = [
- 'trace-attributes>=5.0.0,<6.0.0',
+ 'trace-attributes>=6.0.0,<7.0.0',
  'opentelemetry-api>=1.25.0',
  'opentelemetry-sdk>=1.25.0',
  'opentelemetry-instrumentation>=0.46b0',
@@ -49,7 +49,10 @@ dev = [
  "cohere",
  "qdrant_client",
  "weaviate-client",
- "ollama"
+ "ollama",
+ "groq",
+ "google-generativeai",
+ "google-cloud-aiplatform"
  ]

  test = [

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/chat.py

@@ -23,6 +23,7 @@ def chat_comp():
  "message": "The man who is widely credited with discovering gravity is Sir Isaac Newton",
  },
  ],
+ k=3,
  message="Tell me a story in 3 sentences or less?",
  preamble="answer like a pirate",
  # perform web search before answering the question. You can also use your own custom connector.

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/cohere_example/chat_stream.py

@@ -18,6 +18,9 @@ def chat_stream():
  message="Tell me a short story in 2 lines",
  preamble="Respond like a pirate",
  max_tokens=100,
+ k=3,
+ p=0.9,
+ temperature=0.5,
  ):
  if event.event_type == "text-generation":
  result.append(event.text)

langtrace_python_sdk-2.2.1/src/examples/gemini_example/__init__.py

@@ -0,0 +1,6 @@
+ from .main import basic
+
+
+ class GeminiRunner:
+     def run(self):
+         basic()

langtrace_python_sdk-2.2.1/src/examples/gemini_example/function_tools.py

@@ -0,0 +1,62 @@
+ tools = [
+     {
+         "function_declarations": [
+             {
+                 "name": "find_movies",
+                 "description": "find movie titles currently playing in theaters based on any description, genre, title words, etc.",
+                 "parameters": {
+                     "type": "object",
+                     "properties": {
+                         "location": {
+                             "type": "string",
+                             "description": "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616",
+                         },
+                         "description": {
+                             "type": "string",
+                             "description": "Any kind of description including category or genre, title words, attributes, etc.",
+                         },
+                     },
+                     "required": ["description"],
+                 },
+             },
+             {
+                 "name": "find_theaters",
+                 "description": "find theaters based on location and optionally movie title which is currently playing in theaters",
+                 "parameters": {
+                     "type": "object",
+                     "properties": {
+                         "location": {
+                             "type": "string",
+                             "description": "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616",
+                         },
+                         "movie": {"type": "string", "description": "Any movie title"},
+                     },
+                     "required": ["location"],
+                 },
+             },
+             {
+                 "name": "get_showtimes",
+                 "description": "Find the start times for movies playing in a specific theater",
+                 "parameters": {
+                     "type": "object",
+                     "properties": {
+                         "location": {
+                             "type": "string",
+                             "description": "The city and state, e.g. San Francisco, CA or a zip code e.g. 95616",
+                         },
+                         "movie": {"type": "string", "description": "Any movie title"},
+                         "theater": {
+                             "type": "string",
+                             "description": "Name of the theater",
+                         },
+                         "date": {
+                             "type": "string",
+                             "description": "Date for requested showtime",
+                         },
+                     },
+                     "required": ["location", "movie", "theater", "date"],
+                 },
+             },
+         ]
+     }
+ ]

langtrace_python_sdk-2.2.1/src/examples/gemini_example/main.py

@@ -0,0 +1,91 @@
+ from langtrace_python_sdk import langtrace
+ import google.generativeai as genai
+ from dotenv import load_dotenv
+ import os
+ import asyncio
+ import pathlib
+ from .function_tools import tools
+
+ load_dotenv()
+
+ langtrace.init(write_spans_to_console=False, batch=False)
+ genai.configure(api_key=os.environ["GEMINI_API_KEY"])
+
+
+ async def async_demo():
+     task1 = asyncio.create_task(async_generate())
+     task2 = asyncio.create_task(async_generate(stream=True))
+     return await asyncio.gather(task1, task2)
+
+
+ def basic():
+     generate()
+     generate(stream=True, with_tools=True)
+
+     # image_to_text()
+     # audio_to_text()
+     asyncio.run(async_demo())
+
+
+ def generate(stream=False, with_tools=False):
+     model = genai.GenerativeModel(
+         "gemini-1.5-pro", system_instruction="You are a cat. Your name is Neko."
+     )
+
+     response = model.generate_content(
+         "Write a story about a AI and magic",
+         stream=stream,
+         tools=tools if with_tools else None,
+     )
+     if stream:
+         for res in response:
+             if res.text:
+                 print(res.text)
+     else:
+         print(response.text)
+
+
+ async def async_generate(stream=False):
+     model = genai.GenerativeModel(
+         "gemini-1.5-pro", system_instruction="You are a cat. Your name is Neko."
+     )
+     response = await model.generate_content_async(
+         "Write a story about a AI and magic", stream=stream
+     )
+     if stream:
+         async for chunk in response:
+             if chunk.text:
+                 print(chunk.text)
+     else:
+         print(response.text)
+
+
+ def image_to_text(stream=False):
+     model = genai.GenerativeModel("gemini-1.5-flash")
+     image1 = {
+         "mime_type": "image/jpeg",
+         "data": pathlib.Path("src/examples/gemini_example/jetpack.jpg").read_bytes(),
+     }
+
+     prompt = "Describe me this picture. What do you see in it."
+     response = model.generate_content([prompt, image1], stream=stream)
+     if stream:
+         for res in response:
+             print(res.text)
+     else:
+         print(response.text)
+
+
+ # def audio_to_text(stream=False):
+ #     model = genai.GenerativeModel("gemini-1.5-flash")
+ #     audio = genai.upload_file(
+ #         pathlib.Path("src/examples/gemini_example/voice_note.mp3")
+ #     )
+
+ #     prompt = "Summarize this voice recording."
+ #     response = model.generate_content([prompt, audio], stream=stream)
+ #     if stream:
+ #         for res in response:
+ #             print(res.text)
+ #     else:
+ #         print(response.text)

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/langchain_example/__init__.py

@@ -1,6 +1,8 @@
  from .basic import basic_app, rag, load_and_split
  from langtrace_python_sdk import with_langtrace_root_span

+ from .groq_example import groq_basic, groq_streaming
+

  class LangChainRunner:
  @with_langtrace_root_span("LangChain")
@@ -8,3 +10,9 @@ class LangChainRunner:
  basic_app()
  rag()
  load_and_split()
+
+
+ class GroqRunner:
+ @with_langtrace_root_span("Groq")
+ def run(self):
+ groq_streaming()

langtrace_python_sdk-2.2.1/src/examples/langchain_example/groq_example.py

@@ -0,0 +1,45 @@
+ from dotenv import find_dotenv, load_dotenv
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_groq import ChatGroq
+ from groq import Groq
+
+ _ = load_dotenv(find_dotenv())
+
+ from langtrace_python_sdk import langtrace
+
+ # from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init()
+
+ client = Groq()
+
+
+ def groq_basic():
+     chat_completion = client.chat.completions.create(
+         messages=[
+             {
+                 "role": "user",
+                 "content": "Explain the importance of low latency LLMs",
+             }
+         ],
+         stream=False,
+         model="llama3-8b-8192",
+     )
+     return chat_completion
+
+
+ def groq_streaming():
+     chat_completion = client.chat.completions.create(
+         messages=[
+             {
+                 "role": "user",
+                 "content": "Explain the importance of low latency LLMs",
+             }
+         ],
+         stream=True,
+         model="llama3-8b-8192",
+     )
+     for chunk in chat_completion:
+         print(chunk)

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/ollama_example/basic.py

@@ -17,6 +17,7 @@ def chat():
  "content": "hi",
  },
  ],
+ options={"temperature": 0.5},
  stream=True,
  )


{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/__init__.py

@@ -10,6 +10,7 @@ class OpenAIRunner:
  run_conversation as run_conversation_streaming,
  )
  from .chat_completion import chat_completion as chat_completion_example
+
  from .embeddings_create import embeddings_create as embeddings_create_example
  from .function_calling import function_calling as function_example
  from .images_edit import image_edit

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/async_tool_calling_nonstreaming.py

@@ -8,7 +8,7 @@ from langtrace_python_sdk import langtrace, with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

- langtrace.init()
+ langtrace.init(write_spans_to_console=True)

  client = AsyncOpenAI()


{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/chat_completion.py

@@ -9,7 +9,7 @@ from langtrace_python_sdk.utils.with_root_span import (

  _ = load_dotenv(find_dotenv())

- langtrace.init(write_spans_to_console=False)
+ langtrace.init(write_spans_to_console=True)
  client = OpenAI()



{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/embeddings_create.py

@@ -16,5 +16,6 @@ def embeddings_create():
  result = client.embeddings.create(
  model="text-embedding-ada-002",
  input="Once upon a time, there was a pirate.",
+ encoding_format="float",
  )
  return result

{langtrace_python_sdk-2.1.28 → langtrace_python_sdk-2.2.1}/src/examples/openai_example/images_edit.py

@@ -23,8 +23,8 @@ def image_edit():

  response = client.images.edit(
  model="dall-e-2",
- image=open("./resources/lounge_flamingo.png", "rb"),
- mask=open("./resources/mask.png", "rb"),
+ image=open("src/examples/openai_example/resources/lounge_flamingo.png", "rb"),
+ mask=open("src/examples/openai_example/resources/mask.png", "rb"),
  prompt="A sunlit indoor lounge area with a pool and duck standing in side with flamingo.",
  n=1,
  size="1024x1024",

langtrace_python_sdk-2.2.1/src/examples/vertexai_example/__init__.py

@@ -0,0 +1,6 @@
+ from .main import basic
+
+
+ class VertexAIRunner:
+     def run(self):
+         basic()

langtrace_python_sdk-2.2.1/src/examples/vertexai_example/main.py

@@ -0,0 +1,214 @@
+ import vertexai
+ import base64
+ import asyncio
+ import vertexai.preview.generative_models as generative_models
+ from vertexai.language_models import ChatModel, InputOutputTextPair, TextGenerationModel
+ from langtrace_python_sdk import langtrace
+ from vertexai.generative_models import GenerativeModel, Part, FinishReason
+ from dotenv import load_dotenv
+
+ load_dotenv()
+
+ langtrace.init(write_spans_to_console=True, batch=False)
+ vertexai.init(project="model-palace-429011-f5", location="us-central1")
+
+
+ def basic():
+     # chat()
+     # chat_streaming()
+     # streaming_prediction()
+     # asyncio.run(async_streaming_prediction())
+
+     generate()
+     generate(stream=True)
+
+     image_to_text()
+     image_to_text(stream=True)
+
+     video_to_text()
+     video_to_text(stream=True)
+
+     audio_to_text()
+     audio_to_text(stream=True)
+
+
+ def chat():
+     """Chat Example with a Large Language Model"""
+
+     chat_model = ChatModel.from_pretrained("chat-bison")
+
+     parameters = {
+         "temperature": 0.8,
+         "max_output_tokens": 256,
+         "top_p": 0.95,
+         "top_k": 40,
+     }
+
+     chat = chat_model.start_chat(
+         context="My name is Miles. You are an astronomer, knowledgeable about the solar system.",
+         examples=[
+             InputOutputTextPair(
+                 input_text="How many moons does Mars have?",
+                 output_text="The planet Mars has two moons, Phobos and Deimos.",
+             ),
+         ],
+     )
+
+     response = chat.send_message(
+         message="How many planets are there in the solar system?", **parameters
+     )
+
+     return response
+
+
+ def chat_streaming() -> str:
+     """Streaming Chat Example with a Large Language Model"""
+
+     chat_model = ChatModel.from_pretrained("chat-bison")
+
+     parameters = {
+         "temperature": 0.8,
+         "max_output_tokens": 256,
+         "top_p": 0.95,
+         "top_k": 40,
+     }
+
+     chat = chat_model.start_chat(
+         context="My name is Miles. You are an astronomer, knowledgeable about the solar system.",
+         examples=[
+             InputOutputTextPair(
+                 input_text="How many moons does Mars have?",
+                 output_text="The planet Mars has two moons, Phobos and Deimos.",
+             ),
+         ],
+     )
+
+     responses = chat.send_message_streaming(
+         message="How many planets are there in the solar system?", **parameters
+     )
+
+     result = [response for response in responses]
+     return result
+
+
+ def streaming_prediction() -> str:
+     """Streaming Text Example with a Large Language Model"""
+
+     text_generation_model = TextGenerationModel.from_pretrained("text-bison")
+     parameters = {
+         "max_output_tokens": 256,
+         "top_p": 0.8,
+         "top_k": 40,
+     }
+     responses = text_generation_model.predict_streaming(
+         prompt="Give me ten interview questions for the role of program manager.",
+         **parameters,
+     )
+     result = [response for response in responses]
+     print(result)
+     return result
+
+
+ async def async_streaming_prediction() -> str:
+     """Async Streaming Text Example with a Large Language Model"""
+
+     text_generation_model = TextGenerationModel.from_pretrained("text-bison")
+     parameters = {
+         "max_output_tokens": 256,
+         "top_p": 0.8,
+         "top_k": 40,
+     }
+
+     responses = text_generation_model.predict_streaming_async(
+         prompt="Give me ten interview questions for the role of program manager.",
+         **parameters,
+     )
+
+     result = [response async for response in responses]
+     print(result)
+     return result
+
+
+ def generate(stream=False):
+     generation_config = {
+         "max_output_tokens": 8192,
+         "temperature": 1,
+         "top_p": 0.95,
+     }
+     model = GenerativeModel(
+         "gemini-experimental",
+     )
+
+     responses = model.generate_content(
+         ["I am a software engineer. I enjoy playing video games and reading"],
+         generation_config=generation_config,
+         stream=stream,
+     )
+
+     if stream:
+         for res in responses:
+             print(res.text)
+     else:
+         print(responses.text)
+
+
+ def image_to_text(stream=False):
+     model = GenerativeModel(model_name="gemini-experimental")
+
+     response = model.generate_content(
+         [
+             Part.from_uri(
+                 "gs://cloud-samples-data/generative-ai/image/scones.jpg",
+                 mime_type="image/jpeg",
+             ),
+             "What is shown in this image?",
+         ],
+         stream=stream,
+     )
+     if stream:
+         for res in response:
+             print(res.text)
+     else:
+         print(response.text)
+
+
+ def video_to_text(stream=False):
+     model = GenerativeModel(model_name="gemini-experimental")
+
+     prompt = """
+     Provide a description of the video.
+     The description should also contain anything important which people say in the video.
+     """
+
+     video_file_uri = "gs://cloud-samples-data/generative-ai/video/pixel8.mp4"
+     video_file = Part.from_uri(video_file_uri, mime_type="video/mp4")
+
+     contents = [video_file, prompt]
+     response = model.generate_content(contents, stream=stream)
+     if stream:
+         for res in response:
+             print(res.text)
+     else:
+         print(response.text)
+
+
+ def audio_to_text(stream=False):
+     model = GenerativeModel(model_name="gemini-1.5-flash-001")
+
+     prompt = """
+     Please provide a summary for the audio.
+     Provide chapter titles, be concise and short, no need to provide chapter summaries.
+     Do not make up any information that is not part of the audio and do not be verbose.
+     """
+
+     audio_file_uri = "gs://cloud-samples-data/generative-ai/audio/pixel.mp3"
+     audio_file = Part.from_uri(audio_file_uri, mime_type="audio/mpeg")
+
+     contents = [audio_file, prompt]
+
+     response = model.generate_content(contents, stream=stream)
+     if stream:
+         for res in response:
+             print(res.text)
+     else:
+         print(response.text)