langtrace-python-sdk 2.0.0__py3-none-any.whl → 2.0.1__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -0,0 +1,30 @@
+ from dotenv import find_dotenv, load_dotenv
+ from langchain_core.prompts import ChatPromptTemplate
+ from langchain_groq import ChatGroq
+
+ _ = load_dotenv(find_dotenv())
+
+ from langtrace_python_sdk import langtrace
+
+ # from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init()
+
+
+ def groq_example():
+
+     chat = ChatGroq(temperature=0, model_name="mixtral-8x7b-32768")
+
+     system = "You are a helpful assistant."
+     human = "{text}"
+     prompt = ChatPromptTemplate.from_messages([("system", system), ("human", human)])
+
+     chain = prompt | chat
+     result = chain.invoke({"text": "Explain the importance of low latency LLMs in 2 sentences or less."})
+     # print(result)
+     return result
+
+
+ groq_example()
@@ -0,0 +1,85 @@
+ import json
+
+ from dotenv import find_dotenv, load_dotenv
+ from langchain_core.messages import HumanMessage, ToolMessage
+ from langchain_core.tools import tool
+ from langchain_core.utils.function_calling import convert_to_openai_tool
+ from langchain_openai import ChatOpenAI
+ from langgraph.graph import END, MessageGraph
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init()
+
+
+ @tool
+ def multiply(first_number: int, second_number: int):
+     """Multiplies two numbers together."""
+     return first_number * second_number
+
+
+ model = ChatOpenAI(temperature=0)
+ model_with_tools = model.bind(tools=[convert_to_openai_tool(multiply)])
+
+
+ def invoke_model(state):
+     return model_with_tools.invoke(state)
+
+
+ def router(state):
+     tool_calls = state[-1].additional_kwargs.get("tool_calls", [])
+     if len(tool_calls):
+         return "multiply"
+     else:
+         return "end"
+
+
+ def invoke_tool(state):
+     tool_calls = state[-1].additional_kwargs.get("tool_calls", [])
+     multiply_call = None
+
+     for tool_call in tool_calls:
+         if tool_call.get("function").get("name") == "multiply":
+             multiply_call = tool_call
+
+     if multiply_call is None:
+         raise Exception("No adder input found.")
+
+     res = multiply.invoke(
+         json.loads(multiply_call.get("function").get("arguments"))
+     )
+
+     return ToolMessage(
+         tool_call_id=multiply_call.get("id"),
+         content=res
+     )
+
+
+ @with_langtrace_root_span('langgraph_example')
+ def basic():
+
+     graph = MessageGraph()
+
+     graph.add_node("oracle", invoke_model)
+
+     graph.add_node("multiply", invoke_tool)
+
+     graph.add_conditional_edges("oracle", router, {
+         "multiply": "multiply",
+         "end": END,
+     })
+
+     graph.add_edge("multiply", END)
+
+     graph.set_entry_point("oracle")
+
+     runnable = graph.compile()
+
+     answer = runnable.invoke(HumanMessage("What is 1 + 1?"))
+     print(answer)
+
+
+ basic()
@@ -0,0 +1,50 @@
+ import cohere
+ from dotenv import find_dotenv, load_dotenv
+ from qdrant_client import QdrantClient
+ from qdrant_client.models import Batch, Distance, VectorParams
+
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(write_to_langtrace_cloud=False)
+
+
+ @with_langtrace_root_span()
+ def basic():
+     client = QdrantClient(":memory:")
+     cohere_client = cohere.Client()
+
+     client.create_collection(collection_name="MyCollection4", vectors_config=VectorParams(
+         size=1024,
+         distance=Distance.COSINE,
+     ))
+
+     client.upsert(
+         collection_name="MyCollection4",
+         points=Batch(
+             ids=[1],
+             vectors=cohere_client.embed(
+                 model="embed-english-v3.0",  # New Embed v3 model
+                 input_type="search_document",  # Input type for documents
+                 texts=["Qdrant is a vector database written in Rust"],
+             ).embeddings,
+         ),
+     )
+
+     answer = client.search(
+         collection_name="MyCollection4",
+         query_vector=cohere_client.embed(
+             model="embed-english-v3.0",  # New Embed v3 model
+             input_type="search_query",  # Input type for search queries
+             texts=["Which database is written in Rust?"],
+         ).embeddings[0],
+
+     )
+     print(answer[0])
+
+     return answer
+
+
+ basic()
@@ -10,14 +10,17 @@ SERVICE_PROVIDERS = {
      "ANTHROPIC": "Anthropic",
      "AZURE": "Azure",
      "CHROMA": "Chroma",
+     "GROQ": "Groq",
      "LANGCHAIN": "Langchain",
      "LANGCHAIN_COMMUNITY": "Langchain Community",
      "LANGCHAIN_CORE": "Langchain Core",
+     "LANGGRAPH": "Langgraph",
      "LLAMAINDEX": "LlamaIndex",
      "OPENAI": "OpenAI",
      "PINECONE": "Pinecone",
      "COHERE": "Cohere",
      "PPLX": "Perplexity",
+     "QDRANT": "Qdrant",
  }

  LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY = "langtrace_additional_attributes"
@@ -0,0 +1,8 @@
+
+
+ APIS = {
+     "CHAT_COMPLETION": {
+         "METHOD": "groq.chat.completions.create",
+         "ENDPOINT": "/chat/completions",
+     },
+ }
@@ -0,0 +1,72 @@
+ from langtrace.trace_attributes import QdrantDBMethods
+
+ APIS = {
+     "ADD": {
+         "METHOD": QdrantDBMethods.ADD.value,
+         "OPERATION": "add",
+     },
+     "GET_COLLECTION": {
+         "METHOD": QdrantDBMethods.GET_COLLECTION.value,
+         "OPERATION": "get_collection",
+     },
+     "GET_COLLECTIONS": {
+         "METHOD": QdrantDBMethods.GET_COLLECTIONS.value,
+         "OPERATION": "get_collections",
+     },
+     "QUERY": {
+         "METHOD": QdrantDBMethods.QUERY.value,
+         "OPERATION": "query",
+     },
+     "QUERY_BATCH": {
+         "METHOD": QdrantDBMethods.QUERY_BATCH.value,
+         "OPERATION": "query_batch",
+     },
+     "DELETE": {
+         "METHOD": QdrantDBMethods.DELETE.value,
+         "OPERATION": "delete",
+     },
+     "DISCOVER": {
+         "METHOD": QdrantDBMethods.DISCOVER.value,
+         "OPERATION": "discover",
+     },
+     "DISCOVER_BATCH": {
+         "METHOD": QdrantDBMethods.DISCOVER_BATCH.value,
+         "OPERATION": "discover_batch",
+     },
+     "RECOMMEND": {
+         "METHOD": QdrantDBMethods.RECOMMEND.value,
+         "OPERATION": "recommend",
+     },
+     "RECOMMEND_BATCH": {
+         "METHOD": QdrantDBMethods.RECOMMEND_BATCH.value,
+         "OPERATION": "recommend_batch",
+     },
+     "RETRIEVE": {
+         "METHOD": QdrantDBMethods.RETRIEVE.value,
+         "OPERATION": "retrieve",
+     },
+     "SEARCH": {
+         "METHOD": QdrantDBMethods.SEARCH.value,
+         "OPERATION": "search",
+     },
+     "SEARCH_BATCH": {
+         "METHOD": QdrantDBMethods.SEARCH_BATCH.value,
+         "OPERATION": "search_batch",
+     },
+     "UPSERT": {
+         "METHOD": QdrantDBMethods.UPSERT.value,
+         "OPERATION": "upsert",
+     },
+     "COUNT": {
+         "METHOD": QdrantDBMethods.COUNT.value,
+         "OPERATION": "count",
+     },
+     "UPDATE_COLLECTION": {
+         "METHOD": QdrantDBMethods.UPDATE_COLLECTION.value,
+         "OPERATION": "update_collection",
+     },
+     "UPDATE_VECTORS": {
+         "METHOD": QdrantDBMethods.UPDATE_VECTORS.value,
+         "OPERATION": "update_vectors",
+     },
+ }
@@ -0,0 +1,56 @@
+ """
+ Copyright (c) 2024 Scale3 Labs
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ """
+
+ import importlib.metadata
+ import logging
+ from typing import Collection
+
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from opentelemetry.trace import get_tracer
+ from wrapt import wrap_function_wrapper
+
+ from langtrace_python_sdk.instrumentation.groq.patch import (
+     async_chat_completions_create, chat_completions_create)
+
+ logging.basicConfig(level=logging.FATAL)
+
+
+ class GroqInstrumentation(BaseInstrumentor):
+
+     def instrumentation_dependencies(self) -> Collection[str]:
+         return ["groq >= 0.5.0"]
+
+     def _instrument(self, **kwargs):
+         tracer_provider = kwargs.get("tracer_provider")
+         tracer = get_tracer(__name__, "", tracer_provider)
+         version = importlib.metadata.version("groq")
+
+         wrap_function_wrapper(
+             "groq.resources.chat.completions",
+             "Completions.create",
+             chat_completions_create("groq.chat.completions.create", version, tracer),
+         )
+
+         wrap_function_wrapper(
+             "groq.resources.chat.completions",
+             "AsyncCompletions.create",
+             async_chat_completions_create(
+                 "groq.chat.completions.create_stream", version, tracer
+             ),
+         )
+
+     def _uninstrument(self, **kwargs):
+         pass
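
The wrappers registered above patch Completions.create and AsyncCompletions.create on the groq client, so direct use of the Groq SDK should be traced once langtrace.init() has run. A minimal sketch of such a call, assuming GROQ_API_KEY is set in the environment, that langtrace.init() registers GroqInstrumentation in this release, and reusing the model name from the example above:

from groq import Groq

from langtrace_python_sdk import langtrace

langtrace.init()  # expected to register GroqInstrumentation among the other instrumentations

client = Groq()  # reads GROQ_API_KEY from the environment
completion = client.chat.completions.create(
    model="mixtral-8x7b-32768",
    messages=[{"role": "user", "content": "Explain low latency LLMs in one sentence."}],
)
print(completion.choices[0].message.content)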