langtrace-python-sdk 1.1.3__tar.gz → 1.1.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langtrace-python-sdk-1.1.5/PKG-INFO +83 -0
- langtrace-python-sdk-1.1.5/README.md +67 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/setup.py +5 -3
- langtrace-python-sdk-1.1.5/src/examples/anthropic_example/completion.py +29 -0
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants/instrumentation/anthropic.py +6 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/common.py +5 -4
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/extensions/langtrace_exporter.py +2 -2
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py +39 -0
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/patch.py +137 -0
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/langtrace.py +4 -0
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/utils/__init__.py +0 -0
- langtrace-python-sdk-1.1.5/src/langtrace_python_sdk.egg-info/PKG-INFO +83 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/SOURCES.txt +6 -1
- langtrace-python-sdk-1.1.3/PKG-INFO +0 -17
- langtrace-python-sdk-1.1.3/README.md +0 -22
- langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/chroma/apis.py +0 -40
- langtrace-python-sdk-1.1.3/src/langtrace_python_sdk.egg-info/PKG-INFO +0 -17
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/LICENSE +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/setup.cfg +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3/src/examples/langchain_example → langtrace-python-sdk-1.1.5/src/examples/anthropic_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/basic.py +0 -0
- {langtrace-python-sdk-1.1.3/src/examples/llamaindex_example → langtrace-python-sdk-1.1.5/src/examples/langchain_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/basic.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/tool.py +0 -0
- {langtrace-python-sdk-1.1.3/src/examples/openai → langtrace-python-sdk-1.1.5/src/examples/llamaindex_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/llamaindex_example/basic.py +0 -0
- {langtrace-python-sdk-1.1.3/src/examples/pinecone_example → langtrace-python-sdk-1.1.5/src/examples/openai}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/chat_completion.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/embeddings_create.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/function_calling.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/images_generate.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/constants → langtrace-python-sdk-1.1.5/src/examples/pinecone_example}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/pinecone_example/basic.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/constants/instrumentation → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/extensions → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants/instrumentation}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/chroma.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/openai.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/extensions}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/chroma → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/langchain → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/langchain_community → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/chroma}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/patch.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/langchain_core → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/patch.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/llamaindex → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain_community}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/openai → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain_core}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/pinecone → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/llamaindex}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py +0 -0
- {langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/utils → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/openai}/__init__.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/patch.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/llm.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/with_root_span.py +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/dependency_links.txt +0 -0
- {langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/top_level.txt +0 -0

langtrace-python-sdk-1.1.5/PKG-INFO ADDED

@@ -0,0 +1,83 @@
+Metadata-Version: 2.1
+Name: langtrace-python-sdk
+Version: 1.1.5
+Summary: Python SDK for LangTrace
+Home-page: https://github.com/Scale3-Labs/langtrace-python-sdk
+Author: Scale3 Labs
+Author-email: engineering@scale3labs.com
+Maintainer: ['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber', 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi']
+License: AGPL-3.0-or-later
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
+<h1 align="center">LangTrace</h1>
+
+Looking for the Typescript version? Check out [langtrace-typescript](https://github.com/Scale3-Labs/langtrace-typescript-sdk).
+
+LangTrace is a set of extensions built on top of [OpenTelemetry](https://opentelemetry.io/) that gives you complete observability over your LLM application. Because it uses OpenTelemetry under the hood.
+
+
+The repo contains standard OpenTelemetry instrumentations for LLM providers and Vector DBs, while still outputting standard OpenTelemetry data that can be connected to your observability stack.
+If you already have OpenTelemetry instrumented, you can just add any of our instrumentations directly.
+
+## 🚀 Getting Started
+
+The easiest way to get started is to use our SDK.
+
+Install the SDK:
+
+```bash
+pip install langtrace-python-sdk
+```
+
+Then, to start instrumenting your code, just add this line to your code:
+
+```python
+from langtrace_python_sdk import langtrace
+
+langtrace.init()
+```
+
+That's it. You're now tracing your code with LangTrace!
+If you want to see the traces you can enable logging
+
+```python
+langtrace.init(log_spans_to_console=True)
+```
+
+If you want to export traces to an external endpoint, you will need to add ```LANGTRACE_URL``` to ```.env``` file.
+```python
+langtrace.init(write_to_remote_url=True)
+```
+
+
+
+## 🪗 What do we instrument?
+
+OpenLLMetry can instrument everything that [OpenTelemetry already instruments](https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation) - so things like your DB, API calls, and more. On top of that, we built a set of custom extensions that instrument things like your calls to OpenAI or Anthropic, or your Vector DB like Chroma, Pinecone, Qdrant or Weaviate.
+
+### LLM Providers
+
+- ✅ OpenAI / Azure OpenAI
+- ✅ Anthropic
+
+
+
+
+### Vector DBs
+
+- ✅ Chroma
+- ✅ Pinecone
+
+### Frameworks
+
+- ✅ LangChain
+- ✅ [LlamaIndex](https://docs.llamaindex.ai/en/stable/module_guides/observability/observability.html#openllmetry)
+
+
+

langtrace-python-sdk-1.1.5/README.md ADDED

@@ -0,0 +1,67 @@
+<h1 align="center">LangTrace</h1>
+
+Looking for the Typescript version? Check out [langtrace-typescript](https://github.com/Scale3-Labs/langtrace-typescript-sdk).
+
+LangTrace is a set of extensions built on top of [OpenTelemetry](https://opentelemetry.io/) that gives you complete observability over your LLM application. Because it uses OpenTelemetry under the hood.
+
+
+The repo contains standard OpenTelemetry instrumentations for LLM providers and Vector DBs, while still outputting standard OpenTelemetry data that can be connected to your observability stack.
+If you already have OpenTelemetry instrumented, you can just add any of our instrumentations directly.
+
+## 🚀 Getting Started
+
+The easiest way to get started is to use our SDK.
+
+Install the SDK:
+
+```bash
+pip install langtrace-python-sdk
+```
+
+Then, to start instrumenting your code, just add this line to your code:
+
+```python
+from langtrace_python_sdk import langtrace
+
+langtrace.init()
+```
+
+That's it. You're now tracing your code with LangTrace!
+If you want to see the traces you can enable logging
+
+```python
+langtrace.init(log_spans_to_console=True)
+```
+
+If you want to export traces to an external endpoint, you will need to add ```LANGTRACE_URL``` to ```.env``` file.
+```python
+langtrace.init(write_to_remote_url=True)
+```
+
+
+
+## 🪗 What do we instrument?
+
+OpenLLMetry can instrument everything that [OpenTelemetry already instruments](https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation) - so things like your DB, API calls, and more. On top of that, we built a set of custom extensions that instrument things like your calls to OpenAI or Anthropic, or your Vector DB like Chroma, Pinecone, Qdrant or Weaviate.
+
+### LLM Providers
+
+- ✅ OpenAI / Azure OpenAI
+- ✅ Anthropic
+
+
+
+
+### Vector DBs
+
+- ✅ Chroma
+- ✅ Pinecone
+
+### Frameworks
+
+- ✅ LangChain
+- ✅ [LlamaIndex](https://docs.llamaindex.ai/en/stable/module_guides/observability/observability.html#openllmetry)
+
+
+

{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/setup.py RENAMED

@@ -1,15 +1,17 @@
 from setuptools import find_packages, setup
-
+def readme():
+    with open('README.md') as f:
+        return f.read()
 setup(
     name='langtrace-python-sdk',  # Choose a unique name for PyPI
-    version='1.1.3',
+    version='1.1.5',
     author='Scale3 Labs',
     license="AGPL-3.0-or-later",
     author_email='engineering@scale3labs.com',
     maintainer=['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber',
                 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi'],
     description='Python SDK for LangTrace',
-    long_description=
+    long_description=readme(),
     long_description_content_type='text/markdown',
     url='https://github.com/Scale3-Labs/langtrace-python-sdk',  # Project home page
     package_dir={'': 'src'},

langtrace-python-sdk-1.1.5/src/examples/anthropic_example/completion.py ADDED

@@ -0,0 +1,29 @@
+"""Example of using the anthropic API to create a message."""
+import anthropic
+from dotenv import find_dotenv, load_dotenv
+
+from langtrace_python_sdk import langtrace
+
+_ = load_dotenv(find_dotenv())
+
+langtrace.init(batch=False, log_spans_to_console=True,
+               write_to_remote_url=False)
+
+
+def messages_create():
+
+    client = anthropic.Anthropic()
+
+    message = client.messages.create(
+        model="claude-3-opus-20240229",
+        max_tokens=1000,
+        temperature=0.0,
+        system="Respond only in Yoda-speak.",
+        messages=[
+            {"role": "user", "content": "How are you today?"}
+        ],
+        stream=True
+    )
+
+    for response in message:
+        pass

{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/common.py RENAMED

@@ -7,12 +7,13 @@ TIKTOKEN_MODEL_MAPPING = {
 }
 
 SERVICE_PROVIDERS = {
-    "
+    "ANTHROPIC": "Anthropic",
     "AZURE": "Azure",
+    "CHROMA": "Chroma",
     "LANGCHAIN": "Langchain",
-    "LANGCHAIN_CORE": "Langchain Core",
     "LANGCHAIN_COMMUNITY": "Langchain Community",
-    "
+    "LANGCHAIN_CORE": "Langchain Core",
     "LLAMAINDEX": "LlamaIndex",
-    "
+    "OPENAI": "OpenAI",
+    "PINECONE": "Pinecone",
 }

{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/extensions/langtrace_exporter.py RENAMED

@@ -6,7 +6,7 @@ import requests
 from opentelemetry.sdk.trace.export import (ReadableSpan, SpanExporter,
                                             SpanExportResult)
 
-
+from opentelemetry.trace.span import format_trace_id
 class LangTraceExporter(SpanExporter):
     api_key: str
     url: str
@@ -37,7 +37,7 @@ class LangTraceExporter(SpanExporter):
 
         data = [
             {
-                'traceId': span.get_span_context().trace_id,
+                'traceId': format_trace_id(span.get_span_context().trace_id),
                 'instrumentationLibrary': span.instrumentation_info.__repr__(),
                 'droppedEventsCount': span.dropped_events,
                 'droppedAttributesCount': span.dropped_attributes,
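
A note on the exporter change above: span contexts store the trace ID as a raw integer, and `format_trace_id` from the OpenTelemetry API renders it as the 32-character lowercase hex string that tracing backends conventionally expect. A minimal sketch of the before/after payload value (the trace ID below is an illustrative constant, not taken from this package):

```python
from opentelemetry.trace.span import format_trace_id

# Illustrative trace ID; OpenTelemetry generates these as random 128-bit integers.
trace_id = 0x5B8AA5A2D2C872E8321CF37308D69DF2

print(trace_id)                   # what 1.1.3 put in 'traceId': a large decimal integer
print(format_trace_id(trace_id))  # what 1.1.5 sends: '5b8aa5a2d2c872e8321cf37308d69df2'
```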

langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py ADDED

@@ -0,0 +1,39 @@
+"""
+Instrumentation for Anthropic
+"""
+import importlib.metadata
+from typing import Collection
+
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.trace import get_tracer
+from wrapt import wrap_function_wrapper
+
+from langtrace_python_sdk.instrumentation.anthropic.patch import \
+    messages_create
+
+
+class AnthropicInstrumentation(BaseInstrumentor):
+    """
+    The AnthropicInstrumentation class represents the Anthropic instrumentation
+    """
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        return ["anthropic >= 0.19.1"]
+
+    def _instrument(self, **kwargs):
+        tracer_provider = kwargs.get("tracer_provider")
+        tracer = get_tracer(__name__, "", tracer_provider)
+        version = importlib.metadata.version('anthropic')
+
+        wrap_function_wrapper(
+            'anthropic.resources.messages',
+            'Messages.create',
+            messages_create(
+                'anthropic.messages.create', version, tracer)
+        )
+
+    def _instrument_module(self, module_name):
+        pass
+
+    def _uninstrument(self, **kwargs):
+        pass
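
Since `AnthropicInstrumentation` follows the standard OpenTelemetry `BaseInstrumentor` contract, it is wired into `langtrace.init()` (see the langtrace.py hunk below), but it could also be applied on its own against an existing tracer provider. A minimal sketch, assuming a locally configured SDK provider; the console exporter setup here is illustrative and not part of this package:

```python
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

from langtrace_python_sdk.instrumentation.anthropic.instrumentation import \
    AnthropicInstrumentation

# Illustrative provider that prints finished spans to stdout.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))

# Wraps anthropic.resources.messages.Messages.create via wrapt, so any
# subsequent client.messages.create(...) call is traced with this provider.
AnthropicInstrumentation().instrument(tracer_provider=provider)
```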

langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic/patch.py ADDED

@@ -0,0 +1,137 @@
+"""
+This module contains the patching logic for the Anthropic library."""
+import json
+
+from langtrace.trace_attributes import Event, LLMSpanAttributes
+from opentelemetry.trace import SpanKind
+from opentelemetry.trace.status import Status, StatusCode
+
+from langtrace_python_sdk.constants.instrumentation.anthropic import APIS
+from langtrace_python_sdk.constants.instrumentation.common import \
+    SERVICE_PROVIDERS
+from langtrace_python_sdk.utils.llm import estimate_tokens
+
+
+def messages_create(original_method, version, tracer):
+    """Wrap the `messages_create` method."""
+    def traced_method(wrapped, instance, args, kwargs):
+        base_url = str(instance._client._base_url) if hasattr(
+            instance, '_client') and hasattr(instance._client, '_base_url') else ""
+        service_provider = SERVICE_PROVIDERS['ANTHROPIC']
+        span_attributes = {
+            "langtrace.service.name": service_provider,
+            "langtrace.service.type": "llm",
+            "langtrace.service.version": version,
+            "langtrace.version": "1.0.0",
+            "url.full": base_url,
+            "llm.api": APIS["MESSAGES_CREATE"]["ENDPOINT"],
+            "llm.model": kwargs.get('model'),
+            "llm.prompts": json.dumps(kwargs.get('messages', [])),
+            "llm.stream": kwargs.get('stream'),
+        }
+
+        attributes = LLMSpanAttributes(**span_attributes)
+
+        if kwargs.get('temperature') is not None:
+            attributes.llm_temperature = kwargs.get('temperature')
+        if kwargs.get('top_p') is not None:
+            attributes.llm_top_p = kwargs.get('top_p')
+        if kwargs.get('top_k') is not None:
+            attributes.llm_top_p = kwargs.get('top_k')
+        if kwargs.get('user') is not None:
+            attributes.llm_user = kwargs.get('user')
+
+        span = tracer.start_span(
+            APIS["MESSAGES_CREATE"]["METHOD"], kind=SpanKind.CLIENT)
+        for field, value in attributes.model_dump(by_alias=True).items():
+            if value is not None:
+                span.set_attribute(field, value)
+        try:
+            # Attempt to call the original method
+            result = wrapped(*args, **kwargs)
+            if kwargs.get('stream') is False:
+                if hasattr(result, 'content') and result.content is not None:
+                    span.set_attribute(
+                        "llm.responses", json.dumps([{
+                            "text": result.content[0].text,
+                            "type": result.content[0].type
+                        }]))
+                else:
+                    responses = []
+                    span.set_attribute(
+                        "llm.responses", json.dumps(responses))
+                if hasattr(result, 'system_fingerprint') and \
+                        result.system_fingerprint is not None:
+                    span.set_attribute(
+                        "llm.system.fingerprint", result.system_fingerprint)
+                # Get the usage
+                if hasattr(result, 'usage') and result.usage is not None:
+                    usage = result.usage
+                    if usage is not None:
+                        usage_dict = {
+                            "input_tokens": usage.input_tokens,
+                            "output_tokens": usage.output_tokens,
+                            "total_tokens": usage.input_tokens + usage.output_tokens
+                        }
+                        span.set_attribute(
+                            "llm.token.counts", json.dumps(usage_dict))
+                span.set_status(StatusCode.OK)
+                span.end()
+                return result
+            else:
+                return handle_streaming_response(result, span)
+        except Exception as e:
+            # Record the exception in the span
+            span.record_exception(e)
+            # Set the span status to indicate an error
+            span.set_status(Status(StatusCode.ERROR, str(e)))
+            # Reraise the exception to ensure it's not swallowed
+            span.end()
+            raise
+
+    def handle_streaming_response(result, span):
+        """Process and yield streaming response chunks."""
+        result_content = []
+        span.add_event(Event.STREAM_START.value)
+        input_tokens = 0
+        output_tokens = 0
+        try:
+            for chunk in result:
+                content = ""
+                if hasattr(chunk, 'delta') and chunk.delta is not None:
+                    content = chunk.delta.text if hasattr(
+                        chunk.delta, 'text') else ""
+                # Assuming content needs to be aggregated before processing
+                result_content.append(content if len(content) > 0 else "")
+
+                if hasattr(chunk, 'message') and hasattr(chunk.message, 'usage'):
+                    input_tokens += chunk.message.usage.input_tokens if hasattr(
+                        chunk.message.usage, 'input_tokens') else 0
+                    output_tokens += chunk.message.usage.output_tokens if hasattr(
+                        chunk.message.usage, 'output_tokens') else 0
+
+                # Assuming span.add_event is part of a larger logging or event system
+                # Add event for each chunk of content
+                if content:
+                    span.add_event(Event.STREAM_OUTPUT.value, {
+                        "response": "".join(content)
+                    })
+
+                # Assuming this is part of a generator, yield chunk or aggregated content
+                yield content
+        finally:
+
+            # Finalize span after processing all chunks
+            span.add_event(Event.STREAM_END.value)
+            span.set_attribute("llm.token.counts", json.dumps({
+                "input_tokens": input_tokens,
+                "output_tokens": output_tokens,
+                "total_tokens": input_tokens + output_tokens
+            }))
+            span.set_attribute("llm.responses", json.dumps(
+                [{"text": "".join(result_content)}]))
+            span.set_status(StatusCode.OK)
+            span.end()
+
+    # return the wrapped method
+    return traced_method
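
One behavioral detail worth noting in the patch above: when `stream=True`, the wrapper returns the `handle_streaming_response` generator rather than Anthropic's raw stream, yields the text deltas, and only finalizes the span (token counts, `llm.responses`, `span.end()`) in the `finally` block once the stream is consumed or the generator is closed. A sketch adapted from the `completion.py` example shipped in this release:

```python
import anthropic

from langtrace_python_sdk import langtrace

langtrace.init(batch=False, log_spans_to_console=True, write_to_remote_url=False)

client = anthropic.Anthropic()  # requires ANTHROPIC_API_KEY in the environment
stream = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1000,
    messages=[{"role": "user", "content": "How are you today?"}],
    stream=True,
)

# Iterating to the end is what triggers the STREAM_END event and span.end();
# an abandoned iterator keeps the span open until the generator is closed.
for text_chunk in stream:
    print(text_chunk, end="")
```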

langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/pinecone/__init__.py ADDED
File without changes

{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/langtrace.py RENAMED

@@ -6,6 +6,8 @@ from opentelemetry.sdk.trace.export import (BatchSpanProcessor,
 
 from langtrace_python_sdk.extensions.langtrace_exporter import \
     LangTraceExporter
+from langtrace_python_sdk.instrumentation.anthropic.instrumentation import \
+    AnthropicInstrumentation
 from langtrace_python_sdk.instrumentation.chroma.instrumentation import \
     ChromaInstrumentation
 from langtrace_python_sdk.instrumentation.langchain.instrumentation import \
@@ -61,6 +63,7 @@ def init(
     langchain_instrumentation = LangchainInstrumentation()
     langchain_core_instrumentation = LangchainCoreInstrumentation()
     langchain_community_instrumentation = LangchainCommunityInstrumentation()
+    anthropic_instrumentation = AnthropicInstrumentation()
 
     # Call the instrument method with some arguments
     openai_instrumentation.instrument()
@@ -70,3 +73,4 @@
     langchain_instrumentation.instrument()
     langchain_core_instrumentation.instrument()
     langchain_community_instrumentation.instrument()
+    anthropic_instrumentation.instrument()

langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/utils/__init__.py ADDED
File without changes

langtrace-python-sdk-1.1.5/src/langtrace_python_sdk.egg-info/PKG-INFO ADDED

@@ -0,0 +1,83 @@
+Metadata-Version: 2.1
+Name: langtrace-python-sdk
+Version: 1.1.5
+Summary: Python SDK for LangTrace
+Home-page: https://github.com/Scale3-Labs/langtrace-python-sdk
+Author: Scale3 Labs
+Author-email: engineering@scale3labs.com
+Maintainer: ['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber', 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi']
+License: AGPL-3.0-or-later
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
+Classifier: Operating System :: OS Independent
+Requires-Python: >=3.6
+Description-Content-Type: text/markdown
+License-File: LICENSE
+
+<h1 align="center">LangTrace</h1>
+
+Looking for the Typescript version? Check out [langtrace-typescript](https://github.com/Scale3-Labs/langtrace-typescript-sdk).
+
+LangTrace is a set of extensions built on top of [OpenTelemetry](https://opentelemetry.io/) that gives you complete observability over your LLM application. Because it uses OpenTelemetry under the hood.
+
+
+The repo contains standard OpenTelemetry instrumentations for LLM providers and Vector DBs, while still outputting standard OpenTelemetry data that can be connected to your observability stack.
+If you already have OpenTelemetry instrumented, you can just add any of our instrumentations directly.
+
+## 🚀 Getting Started
+
+The easiest way to get started is to use our SDK.
+
+Install the SDK:
+
+```bash
+pip install langtrace-python-sdk
+```
+
+Then, to start instrumenting your code, just add this line to your code:
+
+```python
+from langtrace_python_sdk import langtrace
+
+langtrace.init()
+```
+
+That's it. You're now tracing your code with LangTrace!
+If you want to see the traces you can enable logging
+
+```python
+langtrace.init(log_spans_to_console=True)
+```
+
+If you want to export traces to an external endpoint, you will need to add ```LANGTRACE_URL``` to ```.env``` file.
+```python
+langtrace.init(write_to_remote_url=True)
+```
+
+
+
+## 🪗 What do we instrument?
+
+OpenLLMetry can instrument everything that [OpenTelemetry already instruments](https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation) - so things like your DB, API calls, and more. On top of that, we built a set of custom extensions that instrument things like your calls to OpenAI or Anthropic, or your Vector DB like Chroma, Pinecone, Qdrant or Weaviate.
+
+### LLM Providers
+
+- ✅ OpenAI / Azure OpenAI
+- ✅ Anthropic
+
+
+
+
+### Vector DBs
+
+- ✅ Chroma
+- ✅ Pinecone
+
+### Frameworks
+
+- ✅ LangChain
+- ✅ [LlamaIndex](https://docs.llamaindex.ai/en/stable/module_guides/observability/observability.html#openllmetry)
+
+
+

{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/SOURCES.txt RENAMED

@@ -2,6 +2,8 @@ LICENSE
 README.md
 setup.py
 src/examples/__init__.py
+src/examples/anthropic_example/__init__.py
+src/examples/anthropic_example/completion.py
 src/examples/chroma_example/__init__.py
 src/examples/chroma_example/basic.py
 src/examples/langchain_example/__init__.py
@@ -24,6 +26,7 @@ src/langtrace_python_sdk.egg-info/dependency_links.txt
 src/langtrace_python_sdk.egg-info/top_level.txt
 src/langtrace_python_sdk/constants/__init__.py
 src/langtrace_python_sdk/constants/instrumentation/__init__.py
+src/langtrace_python_sdk/constants/instrumentation/anthropic.py
 src/langtrace_python_sdk/constants/instrumentation/chroma.py
 src/langtrace_python_sdk/constants/instrumentation/common.py
 src/langtrace_python_sdk/constants/instrumentation/openai.py
@@ -31,8 +34,10 @@ src/langtrace_python_sdk/constants/instrumentation/pinecone.py
 src/langtrace_python_sdk/extensions/__init__.py
 src/langtrace_python_sdk/extensions/langtrace_exporter.py
 src/langtrace_python_sdk/instrumentation/__init__.py
+src/langtrace_python_sdk/instrumentation/anthropic/__init__.py
+src/langtrace_python_sdk/instrumentation/anthropic/instrumentation.py
+src/langtrace_python_sdk/instrumentation/anthropic/patch.py
 src/langtrace_python_sdk/instrumentation/chroma/__init__.py
-src/langtrace_python_sdk/instrumentation/chroma/apis.py
 src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py
 src/langtrace_python_sdk/instrumentation/chroma/patch.py
 src/langtrace_python_sdk/instrumentation/langchain/__init__.py

langtrace-python-sdk-1.1.3/PKG-INFO DELETED

@@ -1,17 +0,0 @@
-Metadata-Version: 2.1
-Name: langtrace-python-sdk
-Version: 1.1.3
-Summary: Python SDK for LangTrace
-Home-page: https://github.com/Scale3-Labs/langtrace-python-sdk
-Author: Scale3 Labs
-Author-email: engineering@scale3labs.com
-Maintainer: ['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber', 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi']
-License: AGPL-3.0-or-later
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.6
-Description-Content-Type: text/markdown
-License-File: LICENSE
-
-LangTrace - Python SDK

langtrace-python-sdk-1.1.3/README.md DELETED

@@ -1,22 +0,0 @@
-# langtrace-python-sdk
-
-export PYTHONPATH="/Users/karthikkalyanaraman/work/langtrace/python-sdk:$PYTHONPATH"
-
-## Steps to run
-
-1. From your root directory, create a virtualenv for installing your dependencies
-```
-python -m venv pysdk
-```
-2. Activate the virtualenv
-```
-source pysdk/bin/activate
-```
-3. Install the dependencies
-```
-pip install -r requirements.txt
-```
-4. Run the example and see the traces on the terminal
-```
-python src/entrypoint.py
-```

langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/chroma/apis.py DELETED

@@ -1,40 +0,0 @@
-from langtrace.trace_attributes import ChromaDBMethods
-
-APIS = {
-    "ADD": {
-        "METHOD": ChromaDBMethods.ADD.value,
-        "OPERATION": "add",
-    },
-    "GET": {
-        "METHOD": ChromaDBMethods.GET.value,
-        "OPERATION": "get",
-    },
-    "QUERY": {
-        "METHOD": ChromaDBMethods.QUERY.value,
-        "OPERATION": "query",
-    },
-    "DELETE": {
-        "METHOD": ChromaDBMethods.DELETE.value,
-        "OPERATION": "delete",
-    },
-    "PEEK": {
-        "METHOD": ChromaDBMethods.PEEK.value,
-        "OPERATION": "peek",
-    },
-    "UPDATE": {
-        "METHOD": ChromaDBMethods.UPDATE.value,
-        "OPERATION": "update",
-    },
-    "UPSERT": {
-        "METHOD": ChromaDBMethods.UPSERT.value,
-        "OPERATION": "upsert",
-    },
-    "MODIFY": {
-        "METHOD": ChromaDBMethods.MODIFY.value,
-        "OPERATION": "modify",
-    },
-    "COUNT": {
-        "METHOD": ChromaDBMethods.COUNT.value,
-        "OPERATION": "count",
-    },
-}

langtrace-python-sdk-1.1.3/src/langtrace_python_sdk.egg-info/PKG-INFO DELETED

@@ -1,17 +0,0 @@
-Metadata-Version: 2.1
-Name: langtrace-python-sdk
-Version: 1.1.3
-Summary: Python SDK for LangTrace
-Home-page: https://github.com/Scale3-Labs/langtrace-python-sdk
-Author: Scale3 Labs
-Author-email: engineering@scale3labs.com
-Maintainer: ['Ali Waleed', 'Darshit Suratwala', 'Dylan Zuber', 'Karthik Kalyanaraman', 'Obinna Okafor', 'Rohit Kadhe', 'Yemi Adejumobi']
-License: AGPL-3.0-or-later
-Classifier: Programming Language :: Python :: 3
-Classifier: License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.6
-Description-Content-Type: text/markdown
-License-File: LICENSE
-
-LangTrace - Python SDK

{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/LICENSE RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/setup.cfg RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/examples/langchain_example → langtrace-python-sdk-1.1.5/src/examples/anthropic_example}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/chroma_example/basic.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/examples/llamaindex_example → langtrace-python-sdk-1.1.5/src/examples/langchain_example}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/basic.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/langchain_example/tool.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/examples/openai → langtrace-python-sdk-1.1.5/src/examples/llamaindex_example}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/llamaindex_example/basic.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/examples/pinecone_example → langtrace-python-sdk-1.1.5/src/examples/openai}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/chat_completion.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/embeddings_create.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/function_calling.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/openai/images_generate.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/constants → langtrace-python-sdk-1.1.5/src/examples/pinecone_example}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/examples/pinecone_example/basic.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/constants/instrumentation → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/extensions → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/constants/instrumentation}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/chroma.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/openai.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/constants/instrumentation/pinecone.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/extensions}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/chroma → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/langchain → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/anthropic}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/langchain_community → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/chroma}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/chroma/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/langchain_core → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/llamaindex → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain_community}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_community/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/openai → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/langchain_core}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/langchain_core/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/instrumentation/pinecone → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/llamaindex}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/llamaindex/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3/src/langtrace_python_sdk/utils → langtrace-python-sdk-1.1.5/src/langtrace_python_sdk/instrumentation/openai}/__init__.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/openai/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/instrumentation.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/instrumentation/pinecone/patch.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/llm.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk/utils/with_root_span.py RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/dependency_links.txt RENAMED (File without changes)
{langtrace-python-sdk-1.1.3 → langtrace-python-sdk-1.1.5}/src/langtrace_python_sdk.egg-info/top_level.txt RENAMED (File without changes)