langtrace-python-sdk 1.0.9__tar.gz → 1.0.11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {langtrace-python-sdk-1.0.9/src/langtrace_python_sdk.egg-info → langtrace-python-sdk-1.0.11}/PKG-INFO +1 -1
- langtrace-python-sdk-1.0.11/README.md +22 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/setup.py +2 -2
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/chroma_example/basic.py +1 -1
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/langchain_example/basic.py +1 -1
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/langchain_example/tool.py +2 -2
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/llamaindex_example/basic.py +1 -1
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/chat_completion.py +16 -3
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/embeddings_create.py +1 -1
- langtrace-python-sdk-1.0.11/src/examples/openai/function_calling.py +75 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/images_generate.py +1 -1
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/pinecone_example/basic.py +1 -1
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/chroma/instrumentation.py +10 -4
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/chroma/patch.py +8 -2
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/constants.py +4 -9
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_core/instrumentation.py +11 -5
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_core/patch.py +24 -8
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/llamaindex/instrumentation.py +6 -4
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/llamaindex/patch.py +6 -1
- {langtrace-python-sdk-1.0.9/src/instrumentation/openai/lib → langtrace-python-sdk-1.0.11/src/instrumentation/openai}/apis.py +3 -0
- {langtrace-python-sdk-1.0.9/src/instrumentation/openai/lib → langtrace-python-sdk-1.0.11/src/instrumentation/openai}/constants.py +13 -0
- langtrace-python-sdk-1.0.11/src/instrumentation/openai/patch.py +268 -0
- langtrace-python-sdk-1.0.11/src/instrumentation/openai/token_estimation.py +48 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/pinecone/instrumentation.py +8 -2
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/pinecone/patch.py +6 -4
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11/src/langtrace_python_sdk.egg-info}/PKG-INFO +1 -1
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/langtrace_python_sdk.egg-info/SOURCES.txt +7 -6
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/langtrace_python_sdk.egg-info/top_level.txt +1 -0
- langtrace-python-sdk-1.0.9/README.md +0 -3
- langtrace-python-sdk-1.0.9/src/instrumentation/openai/patch.py +0 -209
- langtrace-python-sdk-1.0.9/src/instrumentation/utils.py +0 -27
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/LICENSE +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/setup.cfg +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/chroma_example/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/langchain_example/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/llamaindex_example/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/pinecone_example/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9/src/instrumentation → langtrace-python-sdk-1.0.11/src/examples}/setup.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/chroma/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9/src/instrumentation/chroma/lib → langtrace-python-sdk-1.0.11/src/instrumentation/chroma}/apis.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/chroma/lib/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain/instrumentation.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain/patch.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_community/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_community/instrumentation.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_community/patch.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_core/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/llamaindex/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/openai/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/openai/instrumentation.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/openai/lib/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/pinecone/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9/src/instrumentation/pinecone/lib → langtrace-python-sdk-1.0.11/src/instrumentation/pinecone}/apis.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/pinecone/lib/__init__.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/with_root_span.py +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/langtrace_python_sdk.egg-info/dependency_links.txt +0 -0
- {langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/langtrace_python_sdk.egg-info/requires.txt +0 -0
langtrace-python-sdk-1.0.11/README.md
ADDED
@@ -0,0 +1,22 @@
+# langtrace-python-sdk
+
+export PYTHONPATH="/Users/karthikkalyanaraman/work/langtrace/python-sdk:$PYTHONPATH"
+
+## Steps to run
+
+1. From your root directory, create a virtualenv for installing your dependencies
+```
+python -m venv pysdk
+```
+2. Activate the virtualenv
+```
+source pysdk/bin/activate
+```
+3. Install the dependencies
+```
+pip install -r requirements.txt
+```
+4. Run the example and see the traces on the terminal
+```
+python src/entrypoint.py
+```

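The README's final step runs `src/entrypoint.py`, which is not part of this diff. A minimal sketch of what such an entrypoint might look like, based on the example modules changed below; the import path and function name are assumptions, not contents of this package:

```python
# Hypothetical entrypoint sketch -- not included in this package diff.
# Assumes the repository root is on PYTHONPATH as the README suggests.
from examples.openai.chat_completion import chat_completion

if __name__ == "__main__":
    # The example module calls setup_instrumentation() at import time and the
    # function is decorated with @with_langtrace_root_span(), so invoking it
    # emits spans to the terminal via the configured exporter.
    chat_completion()
```
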
{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/setup.py
RENAMED
@@ -4,7 +4,7 @@ with open('requirements.txt') as f:
 
 setup(
     name='langtrace-python-sdk',  # Choose a unique name for PyPI
-    version='1.0.
+    version='1.0.11',
     author='Ali Waleed',
     author_email='ali@scale3labs.com',
     description='LangTrace - Python SDK',
@@ -12,7 +12,7 @@ setup(
     long_description_content_type='text/markdown',
     url='https://github.com/Scale3-Labs/langtrace-python-sdk',  # Project home page
     package_dir={'': 'src'},
-    packages=find_packages(where='src'
+    packages=find_packages(where='src'),
     install_requires=required,
     python_requires='>=3.6',
     classifiers=[

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/chroma_example/basic.py
RENAMED
@@ -2,7 +2,7 @@ import chromadb
 from chromadb.utils import embedding_functions
 from dotenv import find_dotenv, load_dotenv
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/langchain_example/basic.py
RENAMED
@@ -7,7 +7,7 @@ from langchain_core.prompts.chat import ChatPromptTemplate
 from langchain_core.runnables import RunnablePassthrough
 from langchain_openai import ChatOpenAI, OpenAIEmbeddings
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/langchain_example/tool.py
RENAMED
@@ -6,7 +6,7 @@ from langchain_core.pydantic_v1 import BaseModel, Field
 from langchain_core.tools import Tool
 from langchain_openai import ChatOpenAI
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())
@@ -45,7 +45,7 @@ async def aget_prime(n: int, primes: dict = primes) -> str:
     return str(primes.get(int(n)))
 
 
-
+@with_langtrace_root_span()
 def tool_example():
 
     tools = [

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/llamaindex_example/basic.py
RENAMED
@@ -1,7 +1,7 @@
 from dotenv import find_dotenv, load_dotenv
 from llama_index.core import SimpleDirectoryReader, VectorStoreIndex
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/chat_completion.py
RENAMED
@@ -1,7 +1,7 @@
 from dotenv import find_dotenv, load_dotenv
 from openai import OpenAI
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())
@@ -13,14 +13,27 @@ client = OpenAI()
 
 @with_langtrace_root_span()
 def chat_completion():
-
+    response = client.chat.completions.create(
         model="gpt-4",
         messages=[{"role": "user", "content": "Say this is a test three times"}],
-        stream=
+        stream=True,
     )
+    # print(stream)
     # stream = client.chat.completions.create(
     #     model="gpt-4",
     #     messages=[{"role": "user", "content": "Say this is a test three times"}, {"role": "assistant", "content": "This is a test. This is a test. This is a test"},
     #     {"role": "user", "content": "Say this is a mock 4 times"}],
     #     stream=False,
     # )
+
+    result = []
+    for chunk in response:
+        if chunk.choices[0].delta.function_call is not None:
+            content = [
+                choice.delta.function_call.arguments if choice.delta.function_call and
+                choice.delta.function_call.arguments else ""
+                for choice in chunk.choices]
+            result.append(
+                content[0] if len(content) > 0 else "")
+
+    print("".join(result))

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/embeddings_create.py
RENAMED
@@ -1,7 +1,7 @@
 from dotenv import find_dotenv, load_dotenv
 from openai import OpenAI
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())

langtrace-python-sdk-1.0.11/src/examples/openai/function_calling.py
ADDED
@@ -0,0 +1,75 @@
+import json
+
+from dotenv import find_dotenv, load_dotenv
+from openai import OpenAI
+
+from examples.setup import setup_instrumentation
+from instrumentation.with_root_span import with_langtrace_root_span
+
+_ = load_dotenv(find_dotenv())
+
+setup_instrumentation()
+
+client = OpenAI()
+
+
+student_custom_functions = [
+    {
+        'name': 'extract_student_info',
+        'description': 'Get the student information from the body of the input text',
+        'parameters': {
+            'type': 'object',
+            'properties': {
+                'name': {
+                    'type': 'string',
+                    'description': 'Name of the person'
+                },
+                'major': {
+                    'type': 'string',
+                    'description': 'Major subject.'
+                },
+                'school': {
+                    'type': 'string',
+                    'description': 'The university name.'
+                },
+                'grades': {
+                    'type': 'integer',
+                    'description': 'GPA of the student.'
+                },
+                'club': {
+                    'type': 'string',
+                    'description': 'School club for extracurricular activities. '
+                }
+
+            }
+        }
+    }
+]
+
+
+@with_langtrace_root_span()
+def function_calling():
+    response = client.chat.completions.create(
+        model='gpt-3.5-turbo',
+        messages=[{'role': 'user', 'content': "David Nguyen is a sophomore majoring in computer science at Stanford University. He is Asian American and has a 3.8 GPA. David is known for his programming skills and is an active member of the university's Robotics Club. He hopes to pursue a career in artificial intelligence after graduating."}],
+        functions=student_custom_functions,
+        function_call='auto',
+        stream=False
+    )
+
+    # result = []
+    # for chunk in response:
+    #     if chunk.choices[0].delta.function_call is not None:
+    #         content = [
+    #             choice.delta.function_call.arguments if choice.delta.function_call and
+    #             choice.delta.function_call.arguments else ""
+    #             for choice in chunk.choices]
+    #         result.append(
+    #             content[0] if len(content) > 0 else "")
+
+    # print("".join(result))
+
+    # Loading the response as a JSON object
+    json_response = json.loads(
+        response.choices[0].message.function_call.arguments)
+    print(json_response)

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/openai/images_generate.py
RENAMED
@@ -1,7 +1,7 @@
 from dotenv import find_dotenv, load_dotenv
 from openai import OpenAI
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/examples/pinecone_example/basic.py
RENAMED
@@ -2,7 +2,7 @@ from dotenv import find_dotenv, load_dotenv
 from openai import OpenAI
 from pinecone import Pinecone
 
-from
+from examples.setup import setup_instrumentation
 from instrumentation.with_root_span import with_langtrace_root_span
 
 _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/chroma/instrumentation.py
RENAMED
@@ -1,3 +1,6 @@
+"""
+Instrumentation for ChromaDB
+"""
 import importlib.metadata
 from typing import Collection
 
@@ -5,11 +8,14 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
 from opentelemetry.trace import get_tracer
 from wrapt import wrap_function_wrapper
 
-from instrumentation.chroma.
+from instrumentation.chroma.apis import APIS
 from instrumentation.chroma.patch import collection_patch
 
 
 class ChromaInstrumentation(BaseInstrumentor):
+    """
+    The ChromaInstrumentation class represents the ChromaDB instrumentation
+    """
 
     def instrumentation_dependencies(self) -> Collection[str]:
         return ["chromadb >= 0.4.23"]
@@ -19,7 +25,7 @@ class ChromaInstrumentation(BaseInstrumentor):
         tracer = get_tracer(__name__, "", tracer_provider)
         version = importlib.metadata.version('chromadb')
 
-        for operation,
+        for operation, _ in APIS.items():
             wrap_function_wrapper(
                 'chromadb.api.models.Collection',
                 f'Collection.{operation.lower()}',
@@ -27,7 +33,7 @@ class ChromaInstrumentation(BaseInstrumentor):
             )
 
     def _instrument_module(self, module_name):
-
+        pass
 
     def _uninstrument(self, **kwargs):
-
+        pass

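The loop above is the wrapt-based patching pattern used throughout the SDK: every operation named in the APIS dict gets its corresponding Collection method wrapped in a tracing function. A minimal, self-contained sketch of that pattern; the APIS keys and the wrapper body below are illustrative, not the SDK's actual collection_patch implementation:

```python
# Minimal sketch of the wrapt patching pattern; assumes chromadb is installed.
from wrapt import wrap_function_wrapper

def example_patch(operation, version, tracer):
    def traced_method(wrapped, instance, args, kwargs):
        # A real patch (collection_patch) would open an OpenTelemetry span here,
        # set the API attributes, and record the span status; this just delegates.
        print(f"traced {operation} (chromadb {version})")
        return wrapped(*args, **kwargs)
    return traced_method

# Illustrative stand-in for the SDK's APIS dict keyed by operation name.
APIS = {"ADD": {}, "QUERY": {}}

for operation, _ in APIS.items():
    wrap_function_wrapper(
        'chromadb.api.models.Collection',   # module containing the target
        f'Collection.{operation.lower()}',  # attribute path to the method
        example_patch(operation, "0.4.23", tracer=None),
    )
```
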
{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/chroma/patch.py
RENAMED
@@ -1,12 +1,18 @@
+"""
+This module contains the patching logic for the Chroma client.
+"""
 from langtrace.trace_attributes import DatabaseSpanAttributes
-from opentelemetry.trace import SpanKind
+from opentelemetry.trace import SpanKind
 from opentelemetry.trace.status import Status, StatusCode
 
+from instrumentation.chroma.apis import APIS
 from instrumentation.constants import SERVICE_PROVIDERS
-from instrumentation.chroma.lib.apis import APIS
 
 
 def collection_patch(method, version, tracer):
+    """
+    A generic patch method that wraps a function with a span
+    """
     def traced_method(wrapped, instance, args, kwargs):
         api = APIS[method]
         service_provider = SERVICE_PROVIDERS['CHROMA']

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/constants.py
RENAMED
@@ -1,16 +1,11 @@
-
-
-
-    "LANGCHAIN": "Langtrace Langchain SDK",
-    "PINECONE": "Langtrace Pinecone SDK",
-    "LLAMAINDEX": "Langtrace LlamaIndex SDK",
-    "CHROMA": "Langtrace Chroma SDK",
-}
-
+"""
+This file contains the constants used in the project.
+"""
 SERVICE_PROVIDERS = {
     "OPENAI": "OpenAI",
     "AZURE": "Azure",
     "LANGCHAIN": "Langchain",
+    "LANGCHAIN_CORE": "Langchain Core",
     "LANGCHAIN_COMMUNITY": "Langchain Community",
     "PINECONE": "Pinecone",
     "LLAMAINDEX": "LlamaIndex",

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_core/instrumentation.py
RENAMED
@@ -71,15 +71,21 @@ class LangchainCoreInstrumentation(BaseInstrumentor):
         version = importlib.metadata.version('langchain-core')
 
         exclude_methods = ['get_name', 'get_output_schema',
-                           'get_input_schema', 'get_graph', 'to_json'
-
+                           'get_input_schema', 'get_graph', 'to_json',
+                           'to_json_not_implemented', 'bind', 'dict',
+                           'format', 'format_messages', 'format_prompt']
+        exclude_classes = ['BaseChatPromptTemplate', 'Runnable', 'RunnableBinding',
+                           'RunnableBindingBase', 'RunnableEach', 'RunnableEachBase',
+                           'RunnableGenerator', 'RunnablePick', 'RunnableMap',
+                           'RunnableSerializable']
+
         modules_to_patch = [
             ('langchain_core.retrievers', 'retriever',
              generic_patch, True, True),
-            ('langchain_core.prompts.chat', '
-             generic_patch, True,
+            ('langchain_core.prompts.chat', 'prompt',
+             generic_patch, True, True),
             ('langchain_core.runnables.base',
-             '
+             'runnable', runnable_patch, True, True),
             ('langchain_core.runnables.passthrough',
              'runnablepassthrough', runnable_patch, True, True),
             ('langchain_core.output_parsers.string',

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/langchain_core/patch.py
RENAMED
@@ -22,7 +22,7 @@ def generic_patch(method_name, task, tracer, version, trace_output=True, trace_i
     """
 
     def traced_method(wrapped, instance, args, kwargs):
-        service_provider = SERVICE_PROVIDERS['
+        service_provider = SERVICE_PROVIDERS['LANGCHAIN_CORE']
         span_attributes = {
             'langtrace.service.name': service_provider,
             'langtrace.service.type': 'framework',
@@ -32,7 +32,18 @@ def generic_patch(method_name, task, tracer, version, trace_output=True, trace_i
         }
 
         if len(args) > 0 and trace_input:
-
+            inputs = {}
+            for arg in args:
+                if isinstance(arg, dict):
+                    for key, value in arg.items():
+                        if isinstance(value, list):
+                            for item in value:
+                                inputs[key] = item.__class__.__name__
+                        elif isinstance(value, str):
+                            inputs[key] = value
+                elif isinstance(arg, str):
+                    inputs['input'] = arg
+            span_attributes['langchain.inputs'] = to_json_string(inputs)
 
         attributes = FrameworkSpanAttributes(**span_attributes)
 
@@ -73,7 +84,7 @@ def runnable_patch(method_name, task, tracer, version, trace_output=True, trace_
     trace_input: Whether to trace the input of the patched methods.
     """
     def traced_method(wrapped, instance, args, kwargs):
-        service_provider = SERVICE_PROVIDERS['
+        service_provider = SERVICE_PROVIDERS['LANGCHAIN_CORE']
         span_attributes = {
             'langtrace.service.name': service_provider,
             'langtrace.service.type': 'framework',
@@ -84,12 +95,17 @@ def runnable_patch(method_name, task, tracer, version, trace_output=True, trace_
 
         if trace_input:
             inputs = {}
-            args_list = []
             if len(args) > 0:
-                for
-                    if isinstance(
-
-
+                for arg in args:
+                    if isinstance(arg, dict):
+                        for key, value in arg.items():
+                            if isinstance(value, list):
+                                for item in value:
+                                    inputs[key] = item.__class__.__name__
+                            elif isinstance(value, str):
+                                inputs[key] = value
+                    elif isinstance(arg, str):
+                        inputs['input'] = arg
 
             for field, value in instance.steps.items() if hasattr(instance, "steps") and \
                 isinstance(instance.steps, dict) else {}:

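The block added above flattens positional arguments into the `langchain.inputs` span attribute: dict values that are lists are reduced to the class name of their items, string values are kept verbatim, and bare string arguments land under an `input` key. A standalone sketch of that logic (plain Python, no langtrace imports; the example inputs and the Document stand-in are made up for illustration):

```python
import json

class Document:
    """Stand-in for a langchain Document, used only for this illustration."""

def flatten_inputs(args):
    # Mirrors the arg-walking logic added to generic_patch/runnable_patch above.
    inputs = {}
    for arg in args:
        if isinstance(arg, dict):
            for key, value in arg.items():
                if isinstance(value, list):
                    for item in value:
                        inputs[key] = item.__class__.__name__
                elif isinstance(value, str):
                    inputs[key] = value
        elif isinstance(arg, str):
            inputs['input'] = arg
    return inputs

example_args = ({"context": [Document()], "question": "What is Langtrace?"}, "hello")
print(json.dumps(flatten_inputs(example_args)))
# {"context": "Document", "question": "What is Langtrace?", "input": "hello"}
```
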
{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/llamaindex/instrumentation.py
RENAMED
@@ -1,3 +1,6 @@
+"""
+The LlamaindexInstrumentation class represents the LlamaIndex instrumentation
+"""
 import importlib.metadata
 from typing import Collection
 
@@ -8,12 +11,11 @@ from wrapt import wrap_function_wrapper
 
 from instrumentation.llamaindex.patch import generic_patch
 
-MODULES = [
-    "llama_index.core.query_pipeline.query",
-]
-
 
 class LlamaindexInstrumentation(BaseInstrumentor):
+    """
+    The LlamaindexInstrumentation class represents the LlamaIndex instrumentation
+    """
 
     def instrumentation_dependencies(self) -> Collection[str]:
         return ["llama-index >= 0.10.0"]

{langtrace-python-sdk-1.0.9 → langtrace-python-sdk-1.0.11}/src/instrumentation/llamaindex/patch.py
RENAMED
@@ -1,11 +1,16 @@
+"""
+This module contains a generic patch method that wraps a function with a span.
+"""
 from langtrace.trace_attributes import FrameworkSpanAttributes
-from opentelemetry.trace import SpanKind
+from opentelemetry.trace import SpanKind
 from opentelemetry.trace.status import Status, StatusCode
 
 from instrumentation.constants import SERVICE_PROVIDERS
 
 
 def generic_patch(method, task, tracer, version):
+    """
+    A generic patch method that wraps a function with a span"""
     def traced_method(wrapped, instance, args, kwargs):
         service_provider = SERVICE_PROVIDERS['LLAMAINDEX']
         span_attributes = {

{langtrace-python-sdk-1.0.9/src/instrumentation/openai/lib → langtrace-python-sdk-1.0.11/src/instrumentation/openai}/constants.py
RENAMED
@@ -1,3 +1,6 @@
+"""
+Constants for OpenAI API"""
+
 OPENAI_COST_TABLE = {
     "gpt-4-0125-preview": {
         "input": 0.01,
@@ -28,3 +31,13 @@ OPENAI_COST_TABLE = {
         "output": 0.002,
     },
 }
+
+# TODO: Add more models
+# https://github.com/dqbd/tiktoken/blob/74c147e19584a3a1acea0c8e0da4d39415cd33e0/wasm/src/lib.rs#L328
+TIKTOKEN_MODEL_MAPPING = {
+    "gpt-4": "cl100k_base",
+    "gpt-4-32k": "cl100k_base",
+    "gpt-4-0125-preview": "cl100k_base",
+    "gpt-4-1106-preview": "cl100k_base",
+    "gpt-4-1106-vision-preview": "cl100k_base",
+}
