langtrace-python-sdk 1.1.2__tar.gz → 1.1.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langtrace-python-sdk-1.1.5/PKG-INFO +83 -0
- langtrace-python-sdk-1.1.5/README.md +67 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/setup.py +5 -3
- langtrace-python-sdk-1.1.5/src/examples/anthropic_example/completion.py +29 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/basic.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/basic.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/tool.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/llamaindex_example/basic.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/chat_completion.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/embeddings_create.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/function_calling.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/images_generate.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/pinecone_example/basic.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/__init__.py +2 -3
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants/instrumentation/anthropic.py +6 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/common.py +5 -4
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/extensions/langtrace_exporter.py +2 -2
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py +39 -0
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +137 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/patch.py +2 -2
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py +1 -2
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/patch.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +1 -2
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py +1 -1
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/patch.py +4 -4
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py +2 -3
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py +2 -2
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/langtrace.py +12 -8
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/utils/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/llm.py +2 -2
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk.egg-info/PKG-INFO +83 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/SOURCES.txt +6 -1
- langtrace-python-sdk-1.1.2/PKG-INFO +0 -17
- langtrace-python-sdk-1.1.2/README.md +0 -22
- langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/chroma/apis.py +0 -40
- langtrace-python-sdk-1.1.2/src/langtrace_python_sdk.egg-info/PKG-INFO +0 -17
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/LICENSE +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/setup.cfg +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/examples/langchain_example → langtrace-python-sdk-1.1.5/src/examples/anthropic_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/examples/llamaindex_example → langtrace-python-sdk-1.1.5/src/examples/langchain_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/examples/openai → langtrace-python-sdk-1.1.5/src/examples/llamaindex_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/examples/pinecone_example → langtrace-python-sdk-1.1.5/src/examples/openai}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/constants → langtrace-python-sdk-1.1.5/src/examples/pinecone_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/constants/instrumentation → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/extensions → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants/instrumentation}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/chroma.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/openai.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/extensions}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/chroma → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/langchain → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/langchain_community → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/chroma}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/langchain_core → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/llamaindex → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain_community}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/openai → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain_core}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/pinecone → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/llamaindex}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/utils → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/openai}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/with_root_span.py +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/dependency_links.txt +0 -0
- {langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/top_level.txt +0 -0
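The headline change in 1.1.5 is Anthropic support: a new `instrumentation/anthropic` package (instrumentation + patch), an `ANTHROPIC` service-provider constant, and a runnable example. A minimal sketch of exercising it, mirroring the example added at `src/examples/anthropic_example/completion.py` (the model name, prompt, and `init` flags are simply the ones used in that example):

```python
# Sketch based on src/examples/anthropic_example/completion.py from this release.
import anthropic

from langtrace_python_sdk import langtrace

# Initialize tracing before creating the Anthropic client so Messages.create gets patched.
langtrace.init(batch=False, log_spans_to_console=True, write_to_remote_url=False)

client = anthropic.Anthropic()  # reads ANTHROPIC_API_KEY from the environment

message = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1000,
    system="Respond only in Yoda-speak.",
    messages=[{"role": "user", "content": "How are you today?"}],
    stream=True,
)

# With stream=True the patched method returns a generator; the span is
# finalized once the stream has been fully consumed.
for chunk in message:
    pass
```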
langtrace-python-sdk-1.1.5/PKG-INFO
ADDED
@@ -0,0 +1,83 @@
+ Metadata-Version: 2.1
+ Name: langtrace-python-sdk
+ Version: 1.1.5
+ Summary: Python SDK for LangTrace
+ Home-page: https://github.com/Scale3-Labs/langtrace-python-sdk
+ Author: Scale3 Labs
+ Author-email: engineering@scale3labs.com
+ Maintainer: ['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber', 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi']
+ License: AGPL-3.0-or-later
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.6
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+
+ <h1 align="center">LangTrace</h1>
+
+ Looking for the Typescript version? Check out [langtrace-typescript](https://github.com/Scale3-Labs/langtrace-typescript-sdk).
+
+ LangTrace is a set of extensions built on top of [OpenTelemetry](https://opentelemetry.io/) that gives you complete observability over your LLM application. Because it uses OpenTelemetry under the hood.
+
+
+ The repo contains standard OpenTelemetry instrumentations for LLM providers and Vector DBs, while still outputting standard OpenTelemetry data that can be connected to your observability stack.
+ If you already have OpenTelemetry instrumented, you can just add any of our instrumentations directly.
+
+ ## 🚀 Getting Started
+
+ The easiest way to get started is to use our SDK.
+
+ Install the SDK:
+
+ ```bash
+ pip install langtrace-python-sdk
+ ```
+
+ Then, to start instrumenting your code, just add this line to your code:
+
+ ```python
+ from langtrace_python_sdk import langtrace
+
+ langtrace.init()
+ ```
+
+ That's it. You're now tracing your code with LangTrace!
+ If you want to see the traces you can enable logging
+
+ ```python
+ langtrace.init(log_spans_to_console=True)
+ ```
+
+ If you want to export traces to an external endpoint, you will need to add ```LANGTRACE_URL``` to ```.env``` file.
+ ```python
+ langtrace.init(write_to_remote_url=True)
+ ```
+
+
+
+ ## 🪗 What do we instrument?
+
+ OpenLLMetry can instrument everything that [OpenTelemetry already instruments](https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation) - so things like your DB, API calls, and more. On top of that, we built a set of custom extensions that instrument things like your calls to OpenAI or Anthropic, or your Vector DB like Chroma, Pinecone, Qdrant or Weaviate.
+
+ ### LLM Providers
+
+ - ✅ OpenAI / Azure OpenAI
+ - ✅ Anthropic
+
+
+
+
+ ### Vector DBs
+
+ - ✅ Chroma
+ - ✅ Pinecone
+
+ ### Frameworks
+
+ - ✅ LangChain
+ - ✅ [LlamaIndex](https://docs.llamaindex.ai/en/stable/module_guides/observability/observability.html#openllmetry)
+
+
+
+
langtrace-python-sdk-1.1.5/README.md
ADDED
@@ -0,0 +1,67 @@
(the new README; its 67 lines are identical to the description body of langtrace-python-sdk-1.1.5/PKG-INFO shown above, starting at the "LangTrace" heading)
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/setup.py
RENAMED
@@ -1,15 +1,17 @@
  from setuptools import find_packages, setup
-
+ def readme():
+     with open('README.md') as f:
+         return f.read()
  setup(
      name='langtrace-python-sdk',  # Choose a unique name for PyPI
-     version='1.1.2',
+     version='1.1.5',
      author='Scale3 Labs',
      license="AGPL-3.0-or-later",
      author_email='engineering@scale3labs.com',
      maintainer=['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber',
                  'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi'],
      description='Python SDK for LangTrace',
-     long_description=…
+     long_description=readme(),
      long_description_content_type='text/markdown',
      url='https://github.com/Scale3-Labs/langtrace-python-sdk',  # Project home page
      package_dir={'': 'src'},
langtrace-python-sdk-1.1.5/src/examples/anthropic_example/completion.py
ADDED
@@ -0,0 +1,29 @@
+ """Example of using the anthropic API to create a message."""
+ import anthropic
+ from dotenv import find_dotenv, load_dotenv
+
+ from langtrace_python_sdk import langtrace
+
+ _ = load_dotenv(find_dotenv())
+
+ langtrace.init(batch=False, log_spans_to_console=True,
+                write_to_remote_url=False)
+
+
+ def messages_create():
+
+     client = anthropic.Anthropic()
+
+     message = client.messages.create(
+         model="claude-3-opus-20240229",
+         max_tokens=1000,
+         temperature=0.0,
+         system="Respond only in Yoda-speak.",
+         messages=[
+             {"role": "user", "content": "How are you today?"}
+         ],
+         stream=True
+     )
+
+     for response in message:
+         pass
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/basic.py
RENAMED
@@ -2,9 +2,8 @@ import chromadb
  from chromadb.utils import embedding_functions
  from dotenv import find_dotenv, load_dotenv

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/basic.py
RENAMED
@@ -7,9 +7,8 @@ from langchain_core.prompts.chat import ChatPromptTemplate
  from langchain_core.runnables import RunnablePassthrough
  from langchain_openai import ChatOpenAI, OpenAIEmbeddings

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/tool.py
RENAMED
@@ -6,9 +6,8 @@ from langchain_core.pydantic_v1 import BaseModel, Field
  from langchain_core.tools import Tool
  from langchain_openai import ChatOpenAI

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/llamaindex_example/basic.py
RENAMED
@@ -1,9 +1,8 @@
  from dotenv import find_dotenv, load_dotenv
  from llama_index.core import SimpleDirectoryReader, VectorStoreIndex

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/chat_completion.py
RENAMED
@@ -1,9 +1,8 @@
  from dotenv import find_dotenv, load_dotenv
  from openai import OpenAI

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/embeddings_create.py
RENAMED
@@ -1,9 +1,8 @@
  from dotenv import find_dotenv, load_dotenv
  from openai import OpenAI

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/function_calling.py
RENAMED
@@ -3,9 +3,8 @@ import json
  from dotenv import find_dotenv, load_dotenv
  from openai import OpenAI

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/openai/images_generate.py
RENAMED
@@ -1,9 +1,8 @@
  from dotenv import find_dotenv, load_dotenv
  from openai import OpenAI

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/examples/pinecone_example/basic.py
RENAMED
@@ -5,9 +5,8 @@ from dotenv import find_dotenv, load_dotenv
  from openai import OpenAI
  from pinecone import Pinecone

- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  _ = load_dotenv(find_dotenv())

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/__init__.py
RENAMED
@@ -1,7 +1,6 @@
  """
  This module is the entry point for the package. It exports the `init` function"""
- from …
- from …
-     with_langtrace_root_span
+ from langtrace_python_sdk import langtrace
+ from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span

  __all__ = ['langtrace', 'with_langtrace_root_span']
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/common.py
RENAMED
@@ -7,12 +7,13 @@ TIKTOKEN_MODEL_MAPPING = {
  }

  SERVICE_PROVIDERS = {
-     "…
+     "ANTHROPIC": "Anthropic",
      "AZURE": "Azure",
+     "CHROMA": "Chroma",
      "LANGCHAIN": "Langchain",
-     "LANGCHAIN_CORE": "Langchain Core",
      "LANGCHAIN_COMMUNITY": "Langchain Community",
-     "…
+     "LANGCHAIN_CORE": "Langchain Core",
      "LLAMAINDEX": "LlamaIndex",
-     "…
+     "OPENAI": "OpenAI",
+     "PINECONE": "Pinecone",
  }
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/extensions/langtrace_exporter.py
RENAMED
@@ -6,7 +6,7 @@ import requests
  from opentelemetry.sdk.trace.export import (ReadableSpan, SpanExporter,
                                              SpanExportResult)

-
+ from opentelemetry.trace.span import format_trace_id
  class LangTraceExporter(SpanExporter):
      api_key: str
      url: str
@@ -37,7 +37,7 @@ class LangTraceExporter(SpanExporter):

          data = [
              {
-                 'traceId': span.get_span_context().trace_id,
+                 'traceId': format_trace_id(span.get_span_context().trace_id),
                  'instrumentationLibrary': span.instrumentation_info.__repr__(),
                  'droppedEventsCount': span.dropped_events,
                  'droppedAttributesCount': span.dropped_attributes,
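For context on the exporter change above: `format_trace_id` is the OpenTelemetry helper that renders the integer trace id as the 32-character lowercase hex string most tracing backends expect, instead of a raw Python int. A tiny illustration (the trace id value here is arbitrary):

```python
from opentelemetry.trace.span import format_trace_id

# An OpenTelemetry trace id is a 128-bit integer; format_trace_id zero-pads it to 32 hex chars.
trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2
print(format_trace_id(trace_id))  # -> "5b8aa5a2d2c872e8321cf37308d69df2"
```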
langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py
ADDED
@@ -0,0 +1,39 @@
+ """
+ Instrumentation for Anthropic
+ """
+ import importlib.metadata
+ from typing import Collection
+
+ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+ from opentelemetry.trace import get_tracer
+ from wrapt import wrap_function_wrapper
+
+ from langtrace_python_sdk.instrumentation.anthropic.patch import \
+     messages_create
+
+
+ class AnthropicInstrumentation(BaseInstrumentor):
+     """
+     The AnthropicInstrumentation class represents the Anthropic instrumentation
+     """
+
+     def instrumentation_dependencies(self) -> Collection[str]:
+         return ["anthropic >= 0.19.1"]
+
+     def _instrument(self, **kwargs):
+         tracer_provider = kwargs.get("tracer_provider")
+         tracer = get_tracer(__name__, "", tracer_provider)
+         version = importlib.metadata.version('anthropic')
+
+         wrap_function_wrapper(
+             'anthropic.resources.messages',
+             'Messages.create',
+             messages_create(
+                 'anthropic.messages.create', version, tracer)
+         )
+
+     def _instrument_module(self, module_name):
+         pass
+
+     def _uninstrument(self, **kwargs):
+         pass
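The instrumentation above uses wrapt's `wrap_function_wrapper(module, name, wrapper)` together with a wrapper factory: `messages_create(...)` returns a `traced_method(wrapped, instance, args, kwargs)` closure, which is the signature wrapt calls patched methods with. A stripped-down, self-contained sketch of the same pattern (the `Greeter` class and labels are made up for illustration):

```python
from wrapt import wrap_function_wrapper


class Greeter:
    def greet(self, name):
        return f"hello {name}"


def make_wrapper(label):
    """Factory: captures `label` and returns a wrapt-style wrapper,
    the same shape as messages_create() -> traced_method above."""
    def wrapper(wrapped, instance, args, kwargs):
        print(f"[{label}] calling {wrapped.__name__}")
        return wrapped(*args, **kwargs)  # call through to the original method
    return wrapper


# Patch Greeter.greet in this module, just as the SDK patches
# 'anthropic.resources.messages' / 'Messages.create'.
wrap_function_wrapper(__name__, "Greeter.greet", make_wrapper("greet"))

print(Greeter().greet("world"))  # prints the log line, then "hello world"
```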
langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/patch.py
ADDED
@@ -0,0 +1,137 @@
+ """
+ This module contains the patching logic for the Anthropic library."""
+ import json
+
+ from langtrace.trace_attributes import Event, LLMSpanAttributes
+ from opentelemetry.trace import SpanKind
+ from opentelemetry.trace.status import Status, StatusCode
+
+ from langtrace_python_sdk.constants.instrumentation.anthropic import APIS
+ from langtrace_python_sdk.constants.instrumentation.common import \
+     SERVICE_PROVIDERS
+ from langtrace_python_sdk.utils.llm import estimate_tokens
+
+
+ def messages_create(original_method, version, tracer):
+     """Wrap the `messages_create` method."""
+     def traced_method(wrapped, instance, args, kwargs):
+         base_url = str(instance._client._base_url) if hasattr(
+             instance, '_client') and hasattr(instance._client, '_base_url') else ""
+         service_provider = SERVICE_PROVIDERS['ANTHROPIC']
+         span_attributes = {
+             "langtrace.service.name": service_provider,
+             "langtrace.service.type": "llm",
+             "langtrace.service.version": version,
+             "langtrace.version": "1.0.0",
+             "url.full": base_url,
+             "llm.api": APIS["MESSAGES_CREATE"]["ENDPOINT"],
+             "llm.model": kwargs.get('model'),
+             "llm.prompts": json.dumps(kwargs.get('messages', [])),
+             "llm.stream": kwargs.get('stream'),
+         }
+
+         attributes = LLMSpanAttributes(**span_attributes)
+
+         if kwargs.get('temperature') is not None:
+             attributes.llm_temperature = kwargs.get('temperature')
+         if kwargs.get('top_p') is not None:
+             attributes.llm_top_p = kwargs.get('top_p')
+         if kwargs.get('top_k') is not None:
+             attributes.llm_top_p = kwargs.get('top_k')
+         if kwargs.get('user') is not None:
+             attributes.llm_user = kwargs.get('user')
+
+         span = tracer.start_span(
+             APIS["MESSAGES_CREATE"]["METHOD"], kind=SpanKind.CLIENT)
+         for field, value in attributes.model_dump(by_alias=True).items():
+             if value is not None:
+                 span.set_attribute(field, value)
+         try:
+             # Attempt to call the original method
+             result = wrapped(*args, **kwargs)
+             if kwargs.get('stream') is False:
+                 if hasattr(result, 'content') and result.content is not None:
+                     span.set_attribute(
+                         "llm.responses", json.dumps([{
+                             "text": result.content[0].text,
+                             "type": result.content[0].type
+                         }]))
+                 else:
+                     responses = []
+                     span.set_attribute(
+                         "llm.responses", json.dumps(responses))
+                 if hasattr(result, 'system_fingerprint') and \
+                         result.system_fingerprint is not None:
+                     span.set_attribute(
+                         "llm.system.fingerprint", result.system_fingerprint)
+                 # Get the usage
+                 if hasattr(result, 'usage') and result.usage is not None:
+                     usage = result.usage
+                     if usage is not None:
+                         usage_dict = {
+                             "input_tokens": usage.input_tokens,
+                             "output_tokens": usage.output_tokens,
+                             "total_tokens": usage.input_tokens + usage.output_tokens
+                         }
+                         span.set_attribute(
+                             "llm.token.counts", json.dumps(usage_dict))
+                 span.set_status(StatusCode.OK)
+                 span.end()
+                 return result
+             else:
+                 return handle_streaming_response(result, span)
+         except Exception as e:
+             # Record the exception in the span
+             span.record_exception(e)
+             # Set the span status to indicate an error
+             span.set_status(Status(StatusCode.ERROR, str(e)))
+             # Reraise the exception to ensure it's not swallowed
+             span.end()
+             raise
+
+     def handle_streaming_response(result, span):
+         """Process and yield streaming response chunks."""
+         result_content = []
+         span.add_event(Event.STREAM_START.value)
+         input_tokens = 0
+         output_tokens = 0
+         try:
+             for chunk in result:
+                 content = ""
+                 if hasattr(chunk, 'delta') and chunk.delta is not None:
+                     content = chunk.delta.text if hasattr(
+                         chunk.delta, 'text') else ""
+                 # Assuming content needs to be aggregated before processing
+                 result_content.append(content if len(content) > 0 else "")
+
+                 if hasattr(chunk, 'message') and hasattr(chunk.message, 'usage'):
+                     input_tokens += chunk.message.usage.input_tokens if hasattr(
+                         chunk.message.usage, 'input_tokens') else 0
+                     output_tokens += chunk.message.usage.output_tokens if hasattr(
+                         chunk.message.usage, 'output_tokens') else 0
+
+                 # Assuming span.add_event is part of a larger logging or event system
+                 # Add event for each chunk of content
+                 if content:
+                     span.add_event(Event.STREAM_OUTPUT.value, {
+                         "response": "".join(content)
+                     })
+
+                 # Assuming this is part of a generator, yield chunk or aggregated content
+                 yield content
+         finally:
+
+             # Finalize span after processing all chunks
+             span.add_event(Event.STREAM_END.value)
+             span.set_attribute("llm.token.counts", json.dumps({
+                 "input_tokens": input_tokens,
+                 "output_tokens": output_tokens,
+                 "total_tokens": input_tokens + output_tokens
+             }))
+             span.set_attribute("llm.responses", json.dumps(
+                 [{"text": "".join(result_content)}]))
+             span.set_status(StatusCode.OK)
+             span.end()
+
+     # return the wrapped method
+     return traced_method
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py
RENAMED
@@ -8,9 +8,8 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
- from …
-     collection_patch
+ from langtrace_python_sdk.constants.instrumentation.chroma import APIS
+ from langtrace_python_sdk.instrumentation.chroma.patch import collection_patch


  class ChromaInstrumentation(BaseInstrumentor):
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/patch.py
RENAMED
@@ -5,8 +5,8 @@ from langtrace.trace_attributes import DatabaseSpanAttributes
  from opentelemetry.trace import SpanKind
  from opentelemetry.trace.status import Status, StatusCode

- from …
- from …
+ from langtrace_python_sdk.constants.instrumentation.chroma import APIS
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py
RENAMED
@@ -9,8 +9,7 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
-     generic_patch
+ from langtrace_python_sdk.instrumentation.langchain.patch import generic_patch


  def patch_module_classes(module_name, tracer, version, task, trace_output=True, trace_input=True):
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/patch.py
RENAMED
@@ -7,7 +7,7 @@ from langtrace.trace_attributes import FrameworkSpanAttributes
  from opentelemetry.trace import SpanKind, StatusCode
  from opentelemetry.trace.status import Status

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py
RENAMED
@@ -9,7 +9,7 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
+ from langtrace_python_sdk.instrumentation.langchain_community.patch import \
      generic_patch

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py
RENAMED
@@ -4,7 +4,7 @@ from langtrace.trace_attributes import FrameworkSpanAttributes
  from opentelemetry.trace import SpanKind
  from opentelemetry.trace.status import Status, StatusCode

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py
RENAMED
@@ -9,7 +9,7 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
+ from langtrace_python_sdk.instrumentation.langchain_core.patch import (
      generic_patch, runnable_patch)

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py
RENAMED
@@ -7,7 +7,7 @@ from langtrace.trace_attributes import FrameworkSpanAttributes
  from opentelemetry.trace import SpanKind, StatusCode
  from opentelemetry.trace.status import Status

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py
RENAMED
@@ -9,8 +9,7 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
-     generic_patch
+ from langtrace_python_sdk.instrumentation.llamaindex.patch import generic_patch


  class LlamaindexInstrumentation(BaseInstrumentor):
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py
RENAMED
@@ -5,7 +5,7 @@ from langtrace.trace_attributes import FrameworkSpanAttributes
  from opentelemetry.trace import SpanKind
  from opentelemetry.trace.status import Status, StatusCode

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py
RENAMED
@@ -6,7 +6,7 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
+ from langtrace_python_sdk.instrumentation.openai.patch import (
      chat_completions_create, embeddings_create, images_generate)

{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/patch.py
RENAMED
@@ -6,11 +6,11 @@ from langtrace.trace_attributes import Event, LLMSpanAttributes
  from opentelemetry.trace import SpanKind
  from opentelemetry.trace.status import Status, StatusCode

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS
- from …
- from …
- …
+ from langtrace_python_sdk.constants.instrumentation.openai import APIS
+ from langtrace_python_sdk.utils.llm import (calculate_prompt_tokens,
+                                             estimate_tokens)


  def images_generate(original_method, version, tracer):
langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py
ADDED
File without changes
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py
RENAMED
@@ -11,9 +11,8 @@ from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
  from opentelemetry.trace import get_tracer
  from wrapt import wrap_function_wrapper

- from …
- from …
-     generic_patch
+ from langtrace_python_sdk.constants.instrumentation.pinecone import APIS
+ from langtrace_python_sdk.instrumentation.pinecone.patch import generic_patch


  class PineconeInstrumentation(BaseInstrumentor):
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py
RENAMED
@@ -4,9 +4,9 @@ from langtrace.trace_attributes import DatabaseSpanAttributes
  from opentelemetry.trace import SpanKind
  from opentelemetry.trace.status import Status, StatusCode

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      SERVICE_PROVIDERS
- from …
+ from langtrace_python_sdk.constants.instrumentation.pinecone import APIS


  def generic_patch(original_method, method, version, tracer):
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/langtrace.py
RENAMED
@@ -4,21 +4,23 @@ from opentelemetry.sdk.trace.export import (BatchSpanProcessor,
                                              ConsoleSpanExporter,
                                              SimpleSpanProcessor)

- from …
+ from langtrace_python_sdk.extensions.langtrace_exporter import \
      LangTraceExporter
- from …
+ from langtrace_python_sdk.instrumentation.anthropic.instrumentation import \
+     AnthropicInstrumentation
+ from langtrace_python_sdk.instrumentation.chroma.instrumentation import \
      ChromaInstrumentation
- from …
+ from langtrace_python_sdk.instrumentation.langchain.instrumentation import \
      LangchainInstrumentation
- from …
+ from langtrace_python_sdk.instrumentation.langchain_community.instrumentation import \
      LangchainCommunityInstrumentation
- from …
+ from langtrace_python_sdk.instrumentation.langchain_core.instrumentation import \
      LangchainCoreInstrumentation
- from …
+ from langtrace_python_sdk.instrumentation.llamaindex.instrumentation import \
      LlamaindexInstrumentation
- from …
+ from langtrace_python_sdk.instrumentation.openai.instrumentation import \
      OpenAIInstrumentation
- from …
+ from langtrace_python_sdk.instrumentation.pinecone.instrumentation import \
      PineconeInstrumentation


@@ -61,6 +63,7 @@ def init(
      langchain_instrumentation = LangchainInstrumentation()
      langchain_core_instrumentation = LangchainCoreInstrumentation()
      langchain_community_instrumentation = LangchainCommunityInstrumentation()
+     anthropic_instrumentation = AnthropicInstrumentation()

      # Call the instrument method with some arguments
      openai_instrumentation.instrument()
@@ -70,3 +73,4 @@ def init(
      langchain_instrumentation.instrument()
      langchain_core_instrumentation.instrument()
      langchain_community_instrumentation.instrument()
+     anthropic_instrumentation.instrument()
langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/utils/__init__.py
ADDED
File without changes
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/llm.py
RENAMED
@@ -5,9 +5,9 @@ to calculate the price of a model based on its usage.

  from tiktoken import get_encoding

- from …
+ from langtrace_python_sdk.constants.instrumentation.common import \
      TIKTOKEN_MODEL_MAPPING
- from …
+ from langtrace_python_sdk.constants.instrumentation.openai import \
      OPENAI_COST_TABLE

langtrace-python-sdk-1.1.5/src/langtrace_python_sdk.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,83 @@
(content identical to langtrace-python-sdk-1.1.5/PKG-INFO shown above)
{langtrace-python-sdk-1.1.2 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/SOURCES.txt
RENAMED
@@ -2,6 +2,8 @@ LICENSE
  README.md
  setup.py
  src/examples/__init__.py
+ src/examples/anthropic_example/__init__.py
+ src/examples/anthropic_example/completion.py
  src/examples/chroma_example/__init__.py
  src/examples/chroma_example/basic.py
  src/examples/langchain_example/__init__.py
@@ -24,6 +26,7 @@ src/langtrace_python_sdk.egg-info/dependency_links.txt
  src/langtrace_python_sdk.egg-info/top_level.txt
  src/langtrace_python_sdk/constants/__init__.py
  src/langtrace_python_sdk/constants/instrumentation/__init__.py
+ src/langtrace_python_sdk/constants/instrumentation/anthropic.py
  src/langtrace_python_sdk/constants/instrumentation/chroma.py
  src/langtrace_python_sdk/constants/instrumentation/common.py
  src/langtrace_python_sdk/constants/instrumentation/openai.py
@@ -31,8 +34,10 @@ src/langtrace_python_sdk/constants/instrumentation/pinecone.py
  src/langtrace_python_sdk/extensions/__init__.py
  src/langtrace_python_sdk/extensions/langtrace_exporter.py
  src/langtrace_python_sdk/instrumentation/__init__.py
+ src/langtrace_python_sdk/instrumentation/anthropic/__init__.py
+ src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py
+ src/langtrace_python_sdk/instrumentation/anthropic/patch.py
  src/langtrace_python_sdk/instrumentation/chroma/__init__.py
- src/langtrace_python_sdk/instrumentation/chroma/apis.py
  src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py
  src/langtrace_python_sdk/instrumentation/chroma/patch.py
  src/langtrace_python_sdk/instrumentation/langchain/__init__.py
langtrace-python-sdk-1.1.2/PKG-INFO
DELETED
@@ -1,17 +0,0 @@
- Metadata-Version: 2.1
- Name: langtrace-python-sdk
- Version: 1.1.2
- Summary: Python SDK for LangTrace
- Home-page: https://github.com/Scale3-Labs/langtrace-python-sdk
- Author: Scale3 Labs
- Author-email: engineering@scale3labs.com
- Maintainer: ['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber', 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi']
- License: AGPL-3.0-or-later
- Classifier: Programming Language :: Python :: 3
- Classifier: License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.6
- Description-Content-Type: text/markdown
- License-File: LICENSE
-
- LangTrace - Python SDK
langtrace-python-sdk-1.1.2/README.md
DELETED
@@ -1,22 +0,0 @@
- # langtrace-python-sdk
-
- export PYTHONPATH="/Users/karthikkalyanaraman/work/langtrace/python-sdk:$PYTHONPATH"
-
- ## Steps to run
-
- 1. From your root directory, create a virtualenv for installing your dependencies
- ```
- python -m venv pysdk
- ```
- 2. Activate the virtualenv
- ```
- source pysdk/bin/activate
- ```
- 3. Install the dependencies
- ```
- pip install -r requirements.txt
- ```
- 4. Run the example and see the traces on the terminal
- ```
- python src/entrypoint.py
- ```
langtrace-python-sdk-1.1.2/src/langtrace_python_sdk/instrumentation/chroma/apis.py
DELETED
@@ -1,40 +0,0 @@
- from langtrace.trace_attributes import ChromaDBMethods
-
- APIS = {
-     "ADD": {
-         "METHOD": ChromaDBMethods.ADD.value,
-         "OPERATION": "add",
-     },
-     "GET": {
-         "METHOD": ChromaDBMethods.GET.value,
-         "OPERATION": "get",
-     },
-     "QUERY": {
-         "METHOD": ChromaDBMethods.QUERY.value,
-         "OPERATION": "query",
-     },
-     "DELETE": {
-         "METHOD": ChromaDBMethods.DELETE.value,
-         "OPERATION": "delete",
-     },
-     "PEEK": {
-         "METHOD": ChromaDBMethods.PEEK.value,
-         "OPERATION": "peek",
-     },
-     "UPDATE": {
-         "METHOD": ChromaDBMethods.UPDATE.value,
-         "OPERATION": "update",
-     },
-     "UPSERT": {
-         "METHOD": ChromaDBMethods.UPSERT.value,
-         "OPERATION": "upsert",
-     },
-     "MODIFY": {
-         "METHOD": ChromaDBMethods.MODIFY.value,
-         "OPERATION": "modify",
-     },
-     "COUNT": {
-         "METHOD": ChromaDBMethods.COUNT.value,
-         "OPERATION": "count",
-     },
- }
langtrace-python-sdk-1.1.2/src/langtrace_python_sdk.egg-info/PKG-INFO
DELETED
@@ -1,17 +0,0 @@
(content identical to langtrace-python-sdk-1.1.2/PKG-INFO shown above)
All remaining files from the list above with +0 -0 (LICENSE, setup.cfg, and the various __init__.py and constants files that were only moved or renamed) have no content changes.