langtrace-python-sdk 1.2.25__py3-none-any.whl → 1.3.2__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- examples/anthropic_example/completion.py +1 -1
- examples/chroma_example/basic.py +1 -1
- examples/cohere_example/__init__.py +0 -0
- examples/cohere_example/chat.py +26 -0
- examples/cohere_example/chat_stream.py +24 -0
- examples/cohere_example/embed_create.py +22 -0
- examples/fastapi_example/basic_route.py +40 -0
- examples/hiveagent_example/basic.py +23 -0
- examples/llamaindex_example/agent.py +86 -0
- examples/llamaindex_example/basic.py +1 -1
- examples/openai/chat_completion.py +1 -1
- examples/openai/function_calling.py +1 -1
- examples/perplexity_example/basic.py +5 -3
- examples/pinecone_example/basic.py +1 -1
- langtrace_python_sdk/constants/instrumentation/cohere.py +17 -0
- langtrace_python_sdk/constants/instrumentation/common.py +1 -0
- langtrace_python_sdk/extensions/langtrace_exporter.py +10 -2
- langtrace_python_sdk/instrumentation/cohere/__init__.py +0 -0
- langtrace_python_sdk/instrumentation/cohere/instrumentation.py +53 -0
- langtrace_python_sdk/instrumentation/cohere/patch.py +397 -0
- langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +19 -1
- langtrace_python_sdk/instrumentation/llamaindex/patch.py +46 -2
- langtrace_python_sdk/instrumentation/openai/instrumentation.py +25 -0
- langtrace_python_sdk/instrumentation/openai/patch.py +391 -12
- langtrace_python_sdk/langtrace.py +15 -7
- langtrace_python_sdk/version.py +1 -1
- {langtrace_python_sdk-1.2.25.dist-info → langtrace_python_sdk-1.3.2.dist-info}/METADATA +4 -2
- {langtrace_python_sdk-1.2.25.dist-info → langtrace_python_sdk-1.3.2.dist-info}/RECORD +30 -19
- {langtrace_python_sdk-1.2.25.dist-info → langtrace_python_sdk-1.3.2.dist-info}/WHEEL +1 -1
- {langtrace_python_sdk-1.2.25.dist-info → langtrace_python_sdk-1.3.2.dist-info}/licenses/LICENSE +0 -0
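The change common to nearly every example below is the switch from a multi-line langtrace.init(...) call to the single-flag form langtrace.init(write_to_langtrace_cloud=False), which keeps spans away from the hosted Langtrace endpoint while still exercising the local instrumentation. A minimal sketch of the new initialization, assuming only the keyword shown in the hunks that follow:

from langtrace_python_sdk import langtrace

# write_to_langtrace_cloud=False appears throughout the updated examples;
# it disables export to the Langtrace cloud so traces stay local.
langtrace.init(write_to_langtrace_cloud=False)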
examples/anthropic_example/completion.py CHANGED
@@ -8,7 +8,7 @@ from langtrace_python_sdk import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())
 
-langtrace.init(
+langtrace.init(write_to_langtrace_cloud=False)
 
 @with_langtrace_root_span("messages_create")
 def messages_create():
examples/chroma_example/basic.py CHANGED
@@ -7,7 +7,7 @@ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())
 
-langtrace.init(
+langtrace.init(write_to_langtrace_cloud=False)
 
 
 @with_langtrace_root_span()
examples/cohere_example/__init__.py
File without changes
examples/cohere_example/chat.py ADDED
@@ -0,0 +1,26 @@
+from dotenv import find_dotenv, load_dotenv
+import cohere
+
+from langtrace_python_sdk import langtrace
+# from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init(write_to_langtrace_cloud=False)
+
+co = cohere.Client()
+
+
+# @with_langtrace_root_span("chat_create")
+def chat_comp():
+    response = co.chat(
+        chat_history=[
+            {"role": "USER", "message": "Who discovered gravity?"},
+            {"role": "CHATBOT", "message": "The man who is widely credited with discovering gravity is Sir Isaac Newton"}
+        ],
+        message="What is today's news?",
+        # preamble="answer like yoda",
+        # perform web search before answering the question. You can also use your own custom connector.
+        # connectors=[{"id": "web-search"}]
+    )
+    print(response)
examples/cohere_example/chat_stream.py ADDED
@@ -0,0 +1,24 @@
+from dotenv import find_dotenv, load_dotenv
+import cohere
+
+from langtrace_python_sdk import langtrace
+
+# from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init(write_to_langtrace_cloud=False)
+
+co = cohere.Client()
+
+
+# @with_langtrace_root_span("chat_stream")
+def chat_stream():
+    result = []
+    for event in co.chat_stream(message="Tell me a short story in 2 lines"):
+        if event.event_type == "text-generation":
+            result.append(event.text)
+        elif event.event_type == "stream-end":
+            break
+    print("".join(result))
+    return result
examples/cohere_example/embed_create.py ADDED
@@ -0,0 +1,22 @@
+from dotenv import find_dotenv, load_dotenv
+import cohere
+
+from langtrace_python_sdk import langtrace
+
+# from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init(write_to_langtrace_cloud=False)
+
+co = cohere.Client()
+
+
+# @with_langtrace_root_span("embed_create")
+def embed_create():
+    response = co.embed(
+        texts=["hello", "goodbye"],
+        model="embed-english-v3.0",
+        input_type="classification",
+    )
+    # print(response)
examples/fastapi_example/basic_route.py ADDED
@@ -0,0 +1,40 @@
+from fastapi import FastAPI
+from langchain_community.vectorstores.faiss import FAISS
+from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts.chat import ChatPromptTemplate
+from langchain_core.runnables import RunnablePassthrough
+from langchain_openai import ChatOpenAI, OpenAIEmbeddings
+from openai import OpenAI
+
+from langtrace_python_sdk import langtrace
+
+langtrace.init(write_to_langtrace_cloud=False)
+app = FastAPI()
+client = OpenAI()
+
+
+@app.get("/")
+def root():
+    vectorstore = FAISS.from_texts(
+        ["Langtrace helps you ship high quality AI Apps to production."],
+        embedding=OpenAIEmbeddings(),
+    )
+    retriever = vectorstore.as_retriever()
+
+    template = """Answer the question based only on the following context:{context}
+
+    Question: {question}
+    """
+    prompt = ChatPromptTemplate.from_template(template)
+
+    model = ChatOpenAI()
+
+    chain = (
+        {"context": retriever, "question": RunnablePassthrough()}
+        | prompt
+        | model
+        | StrOutputParser()
+    )
+
+    res = chain.invoke("How is Langtrace useful?")
+    return {"response": res}
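The new FastAPI example builds a small LangChain retrieval chain (FAISS retriever, prompt, ChatOpenAI, string parser) inside a single GET route. A hedged sketch of exercising it without starting a server, using FastAPI's standard TestClient — the module name basic_route is an assumption, and an OPENAI_API_KEY must be configured:

from fastapi.testclient import TestClient
from basic_route import app  # assumed import path for the example above

client = TestClient(app)
resp = client.get("/")  # runs the chain and emits spans through langtrace
print(resp.json()["response"])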
examples/hiveagent_example/basic.py ADDED
@@ -0,0 +1,23 @@
+from langtrace_python_sdk import langtrace
+from hive_agent import HiveAgent
+from dotenv import load_dotenv
+from openai import OpenAI
+
+load_dotenv()
+
+langtrace.init(
+    write_to_langtrace_cloud=False,
+    api_host="http://localhost:3000",
+)
+
+client = OpenAI()
+
+
+def basic():
+    my_agent = HiveAgent(
+        name="my_agent",
+        functions=[],
+        instruction="your instructions for this agent's goal",
+    )
+
+    my_agent.run_server()
examples/llamaindex_example/agent.py ADDED
@@ -0,0 +1,86 @@
+import json
+from typing import Sequence, List
+
+from llama_index.agent.openai import OpenAIAgent
+from llama_index.llms.openai import OpenAI
+from llama_index.core.llms import ChatMessage
+from llama_index.core.tools import BaseTool, FunctionTool
+from openai.types.chat import ChatCompletionMessageToolCall
+from langtrace_python_sdk import langtrace
+
+import nest_asyncio
+
+nest_asyncio.apply()
+
+langtrace.init(write_to_langtrace_cloud=False)
+
+
+def multiply(a: int, b: int) -> int:
+    """Multiple two integers and returns the result integer"""
+    return a * b
+
+
+multiply_tool = FunctionTool.from_defaults(fn=multiply)
+
+
+def add(a: int, b: int) -> int:
+    """Add two integers and returns the result integer"""
+    return a + b
+
+
+add_tool = FunctionTool.from_defaults(fn=add)
+
+
+class YourOpenAIAgent:
+    def __init__(
+        self,
+        tools: Sequence[BaseTool] = [],
+        llm: OpenAI = OpenAI(temperature=0, model="gpt-3.5-turbo-0613"),
+        chat_history: List[ChatMessage] = [],
+    ) -> None:
+        self._llm = llm
+        self._tools = {tool.metadata.name: tool for tool in tools}
+        self._chat_history = chat_history
+
+    def reset(self) -> None:
+        self._chat_history = []
+
+    def chat(self, message: str) -> str:
+        chat_history = self._chat_history
+        chat_history.append(ChatMessage(role="user", content=message))
+        tools = [tool.metadata.to_openai_tool() for _, tool in self._tools.items()]
+
+        ai_message = self._llm.chat(chat_history, tools=tools).message
+        additional_kwargs = ai_message.additional_kwargs
+        chat_history.append(ai_message)
+
+        tool_calls = additional_kwargs.get("tool_calls", None)
+        # parallel function calling is now supported
+        if tool_calls is not None:
+            for tool_call in tool_calls:
+                function_message = self._call_function(tool_call)
+                chat_history.append(function_message)
+                ai_message = self._llm.chat(chat_history).message
+                chat_history.append(ai_message)
+
+        return ai_message.content
+
+    def _call_function(self, tool_call: ChatCompletionMessageToolCall) -> ChatMessage:
+        id_ = tool_call.id
+        function_call = tool_call.function
+        tool = self._tools[function_call.name]
+        output = tool(**json.loads(function_call.arguments))
+        return ChatMessage(
+            name=function_call.name,
+            content=str(output),
+            role="tool",
+            additional_kwargs={
+                "tool_call_id": id_,
+                "name": function_call.name,
+            },
+        )
+
+
+# agent = YourOpenAIAgent(tools=[multiply_tool, add_tool])
+llm = OpenAI(model="gpt-3.5-turbo-0613")
+agent = OpenAIAgent.from_tools([multiply_tool, add_tool], llm=llm, verbose=True)
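The script closes by constructing an OpenAIAgent from the two arithmetic tools; the hand-rolled YourOpenAIAgent above it walks through the same tool-calling loop explicitly. A short usage sketch (the prompt and behavior notes are illustrative, not part of the diff):

# agent.chat() is OpenAIAgent's standard single-turn entry point; with
# verbose=True the intermediate multiply/add tool calls are printed.
response = agent.chat("What is (121 * 3) + 42?")
print(str(response))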
examples/llamaindex_example/basic.py CHANGED
@@ -7,7 +7,7 @@ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
 _ = load_dotenv(find_dotenv())
 
 
-langtrace.init(
+langtrace.init(write_to_langtrace_cloud=False)
 
 
 @with_langtrace_root_span()
examples/openai/chat_completion.py CHANGED
@@ -8,7 +8,7 @@ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())
 
-langtrace.init(
+langtrace.init(write_to_langtrace_cloud=False)
 
 client = OpenAI()
 
examples/perplexity_example/basic.py CHANGED
@@ -2,11 +2,13 @@ from openai import OpenAI
 
 from langtrace_python_sdk import langtrace
 from langtrace_python_sdk.utils.with_root_span import (
-    with_additional_attributes,
+    with_additional_attributes,
+    with_langtrace_root_span,
+)
 
 # _ = load_dotenv(find_dotenv())
 
-langtrace.init(
+langtrace.init(write_to_langtrace_cloud=False)
 client = OpenAI(base_url="https://api.perplexity.ai", api_key="PPLX_API_KEY")
 
 
@@ -18,4 +20,4 @@ def basic():
         stream=False,
     )
     print(response)
-    return response
+    return response
examples/pinecone_example/basic.py CHANGED
@@ -11,7 +11,7 @@ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())
 
-langtrace.init(
+langtrace.init(write_to_langtrace_cloud=False)
 
 client = OpenAI()
 pinecone = Pinecone()
langtrace_python_sdk/constants/instrumentation/cohere.py ADDED
@@ -0,0 +1,17 @@
+APIS = {
+    "CHAT_CREATE": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client.chat",
+        "ENDPOINT": "/v1/chat",
+    },
+    "EMBED_CREATE": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client.embed",
+        "ENDPOINT": "/v1/embed",
+    },
+    "CHAT_STREAM": {
+        "URL": "https://api.cohere.ai",
+        "METHOD": "cohere.client.chat_stream",
+        "ENDPOINT": "/v1/messages",
+    },
+}
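These entries mirror the APIS tables the SDK keeps for its other providers, presumably so the new patch functions can name spans and attach endpoint metadata from one place. Note that CHAT_STREAM's ENDPOINT is "/v1/messages", which looks like a carry-over from the Anthropic constants rather than Cohere's streaming route. A hypothetical sketch of the lookup (the attribute key is illustrative, not taken from the diff):

from langtrace_python_sdk.constants.instrumentation.cohere import APIS

api = APIS["CHAT_CREATE"]
span_name = api["METHOD"]  # "cohere.client.chat"
attributes = {"url.full": api["URL"] + api["ENDPOINT"]}  # assumed attribute key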
langtrace_python_sdk/extensions/langtrace_exporter.py CHANGED
@@ -6,7 +6,9 @@ import requests
 from opentelemetry.sdk.trace.export import ReadableSpan, SpanExporter, SpanExportResult
 from opentelemetry.trace.span import format_trace_id
 
-from langtrace_python_sdk.constants.exporter.langtrace_exporter import
+from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
+    LANGTRACE_REMOTE_URL,
+)
 
 
 class LangTraceExporter(SpanExporter):
@@ -48,7 +50,12 @@ class LangTraceExporter(SpanExporter):
     api_key: str
     write_to_remote_url: bool
 
-    def __init__(
+    def __init__(
+        self,
+        api_key: str = None,
+        write_to_remote_url: bool = False,
+        api_host: typing.Optional[str] = None,
+    ) -> None:
         self.api_key = api_key or os.environ.get("LANGTRACE_API_KEY")
         self.write_to_remote_url = write_to_remote_url
         self.api_host: str = api_host or LANGTRACE_REMOTE_URL
@@ -89,6 +96,7 @@ class LangTraceExporter(SpanExporter):
                 data=json.dumps(data),
                 headers={"Content-Type": "application/json", "x-api-key": self.api_key},
             )
+            print(f"sent to {self.api_host}/api/trace with {len(data)} spans")
            return SpanExportResult.SUCCESS
        except Exception as e:
            return SpanExportResult.FAILURE
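LangTraceExporter.__init__ now takes an explicit signature — optional api_key, write_to_remote_url, and a new api_host — so spans can target a self-hosted backend instead of LANGTRACE_REMOTE_URL; the export path also prints a confirmation after each POST. A sketch of wiring the exporter into a standard OpenTelemetry pipeline (only the constructor arguments come from this diff; the rest is stock OTel setup):

from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from langtrace_python_sdk.extensions.langtrace_exporter import LangTraceExporter

exporter = LangTraceExporter(
    api_key="ltr-...",                 # falls back to LANGTRACE_API_KEY if omitted
    write_to_remote_url=True,
    api_host="http://localhost:3000",  # new in this release
)
provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(exporter))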
langtrace_python_sdk/instrumentation/cohere/__init__.py
File without changes
langtrace_python_sdk/instrumentation/cohere/instrumentation.py ADDED
@@ -0,0 +1,53 @@
+"""
+Instrumentation for Cohere
+"""
+
+import importlib.metadata
+from typing import Collection
+
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.trace import get_tracer
+from wrapt import wrap_function_wrapper
+
+from langtrace_python_sdk.instrumentation.cohere.patch import (
+    chat_create,
+    chat_stream,
+    embed_create
+)
+
+class CohereInstrumentation(BaseInstrumentor):
+    """
+    The CohereInstrumentation class represents the Anthropic instrumentation
+    """
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return ["cohere >= 5.0.0"]
+
+    def _instrument(self, **kwargs):
+        tracer_provider = kwargs.get("tracer_provider")
+        tracer = get_tracer(__name__, "", tracer_provider)
+        version = importlib.metadata.version("cohere")
+
+        wrap_function_wrapper(
+            "cohere.client",
+            "Client.chat",
+            chat_create("cohere.client.chat", version, tracer),
+        )
+
+        wrap_function_wrapper(
+            "cohere.client",
+            "Client.chat_stream",
+            chat_stream("cohere.client.chat_stream", version, tracer),
+        )
+
+        wrap_function_wrapper(
+            "cohere.client",
+            "Client.embed",
+            embed_create("cohere.client.embed", version, tracer),
+        )
+
+    def _instrument_module(self, module_name):
+        pass
+
+    def _uninstrument(self, **kwargs):
+        pass
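CohereInstrumentation follows the usual BaseInstrumentor contract: _instrument wraps Client.chat, Client.chat_stream, and Client.embed with the new patch functions, and instrumentation_dependencies pins cohere >= 5.0.0. langtrace.init presumably registers it automatically, but it can also be enabled by hand; a minimal sketch:

from langtrace_python_sdk.instrumentation.cohere.instrumentation import (
    CohereInstrumentation,
)

# instrument() is inherited from BaseInstrumentor and invokes _instrument(),
# installing the wrapt wrappers shown in the hunk above.
CohereInstrumentation().instrument()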