openlit 0.0.2__tar.gz → 0.0.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30) hide show
  1. {openlit-0.0.2 → openlit-0.0.3}/PKG-INFO +68 -7
  2. {openlit-0.0.2 → openlit-0.0.3}/README.md +65 -4
  3. {openlit-0.0.2 → openlit-0.0.3}/pyproject.toml +3 -3
  4. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/__helpers.py +1 -1
  5. {openlit-0.0.2 → openlit-0.0.3}/LICENSE +0 -0
  6. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/__init__.py +0 -0
  7. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/anthropic/__init__.py +0 -0
  8. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/anthropic/anthropic.py +0 -0
  9. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/anthropic/async_anthropic.py +0 -0
  10. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/chroma/__init__.py +0 -0
  11. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/chroma/chroma.py +0 -0
  12. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/cohere/__init__.py +0 -0
  13. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/cohere/cohere.py +0 -0
  14. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/langchain/__init__.py +0 -0
  15. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/langchain/langchain.py +0 -0
  16. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/mistral/__init__.py +0 -0
  17. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/mistral/async_mistral.py +0 -0
  18. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/mistral/mistral.py +0 -0
  19. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/openai/__init__.py +0 -0
  20. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/openai/async_azure_openai.py +0 -0
  21. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/openai/async_openai.py +0 -0
  22. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/openai/azure_openai.py +0 -0
  23. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/openai/openai.py +0 -0
  24. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/pinecone/__init__.py +0 -0
  25. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/pinecone/pinecone.py +0 -0
  26. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/transformers/__init__.py +0 -0
  27. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/instrumentation/transformers/transformers.py +0 -0
  28. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/otel/metrics.py +0 -0
  29. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/otel/tracing.py +0 -0
  30. {openlit-0.0.2 → openlit-0.0.3}/src/openlit/semcov/__init__.py +0 -0
@@ -1,8 +1,8 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: openlit
3
- Version: 0.0.2
3
+ Version: 0.0.3
4
4
  Summary: OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications, facilitating the integration of observability into your GenAI-driven projects
5
- Home-page: https://github.com/open-lit/openlit/tree/main/openlit/python
5
+ Home-page: https://github.com/openlit/openlit/tree/main/openlit/python
6
6
  Keywords: OpenTelemetry,otel,otlp,llm,tracing,openai,anthropic,claude,cohere,llm monitoring,observability,monitoring,gpt,Generative AI,chatGPT
7
7
  Author: OpenLIT
8
8
  Requires-Python: >=3.7.1,<4.0.0
@@ -18,10 +18,25 @@ Requires-Dist: opentelemetry-instrumentation (>=0.45b0,<0.46)
18
18
  Requires-Dist: opentelemetry-sdk (>=1.24.0,<2.0.0)
19
19
  Requires-Dist: requests (>=2.26.0,<3.0.0)
20
20
  Requires-Dist: tiktoken (>=0.1.1,<0.2.0)
21
- Project-URL: Repository, https://github.com/open-lit/openlit/tree/main/openlit/python
21
+ Project-URL: Repository, https://github.com/openlit/openlit/tree/main/openlit/python
22
22
  Description-Content-Type: text/markdown
23
23
 
24
- # OpenLIT: OpenTelemetry-native Observability for LLMs
24
+ <div align="center">
25
+ <img src="https://github.com/openlit/.github/blob/main/profile/assets/wide-logo-no-bg.png?raw=true" alt="OpenLIT Logo" width="30%"><h1>
26
+ OpenTelemetry Auto-Instrumentation for GenAI & LLM Applications</h1>
27
+
28
+ **[Documentation](https://docs.openlit.io/) | [Quickstart](#-getting-started) | [Python SDK](https://github.com/openlit/openlit/tree/main/sdk/python)**
29
+
30
+ [![OpenLIT](https://img.shields.io/badge/OpenLIT-orange)](https://github.com/openlit/openlit)
31
+ [![License](https://img.shields.io/github/license/openlit/openlit?label=License&logo=github&color=f80&logoColor=white)](https://github.com/openlit/openlit/blob/main/LICENSE)
32
+ [![Downloads](https://static.pepy.tech/badge/openlit/month)](https://pepy.tech/project/openlit)
33
+ [![GitHub Last Commit](https://img.shields.io/github/last-commit/openlit/openlit)](https://github.com/openlit/openlit/pulse)
34
+ [![GitHub Contributors](https://img.shields.io/github/contributors/openlit/openlit)](https://github.com/openlit/openlit/graphs/contributors)
35
+
36
+ [![Slack](https://img.shields.io/badge/Slack-4A154B?logo=slack&logoColor=white)](https://join.slack.com/t/openlit/shared_invite/zt-2etnfttwg-TjP_7BZXfYg84oAukY8QRQ)
37
+ [![X](https://img.shields.io/badge/follow-%40OpenLIT-1DA1F2?logo=x&style=social)](https://twitter.com/openlit_io)
38
+
39
+ </div>
25
40
 
26
41
  OpenLIT Python SDK is an **OpenTelemetry-native** Auto instrumentation library for monitoring LLM Applications, facilitating the integration of observability into your GenAI-driven projects. Designed with simplicity and efficiency, OpenLIT offers the ability to embed observability into your GenAI-driven projects effortlessly using just **a single line of code**.
27
42
 
@@ -63,10 +78,20 @@ This project adheres to the [Semantic Conventions](https://github.com/open-telem
63
78
  pip install openlit
64
79
  ```
65
80
 
66
- ## Quick Integration
81
+ ## 🚀 Getting Started
82
+
83
+ ## Step 1: Install OpenLIT SDK
84
+
85
+ ```bash
86
+ pip install openlit
87
+ ```
88
+
89
+ ### Step 2: Instrument your Application
90
+ Integrating OpenLIT into LLM applications is straightforward. Start monitoring your LLM Application with just **one line of code**:
67
91
 
68
92
  ```python
69
93
  import openlit
94
+
70
95
  openlit.init()
71
96
  ```
72
97
 
@@ -74,6 +99,42 @@ By default, OpenLIT directs traces and metrics straight to your console. To forw
74
99
 
75
100
  To send telemetry to OpenTelemetry backends requiring authentication, set the `otlp_headers` parameter with its desired value. Alternatively, you can configure the endpoint by setting the `OTEL_EXPORTER_OTLP_HEADERS` environment variable as recommended in the OpenTelemetry documentation.
76
101
 
102
+ #### Example
103
+
104
+ Here is how you can send telemetry from OpenLIT to Grafana Cloud
105
+
106
+ ```python
107
+ openlit.init(
108
+ otlp_endpoint="https://otlp-gateway-prod-us-east-0.grafana.net/otlp",
109
+ otlp_headers="Authorization=Basic%20<base64 encoded Instance ID and API Token>"
110
+ )
111
+ ```
112
+
113
+ Alternatively, you can set these values using the `OTEL_EXPORTER_OTLP_ENDPOINT` and `OTEL_EXPORTER_OTLP_HEADERS` environment variables
114
+
115
+ ```python
116
+ openlit.init()
117
+ ```
118
+
119
+ ```env
120
+ export OTEL_EXPORTER_OTLP_ENDPOINT = "https://otlp-gateway-prod-us-east-0.grafana.net/otlp"
121
+ export OTEL_EXPORTER_OTLP_HEADERS = "Authorization=Basic%20<base64 encoded Instance ID and API Token>"
122
+ ```
123
+
124
+ ### Step 3: Visualize and Optimize!
125
+ With the LLM Observability data now being collected and sent to your chosen OpenTelemetry backend, the next step is to visualize and analyze this data to glean insights into your application's performance and behavior, and to identify areas of improvement. To use the data in Grafana, follow these detailed instructions to explore your LLM application's Telemetry data.
126
+
127
+ - Select the **Explore** option from Grafana's sidebar.
128
+ - At the top, ensure the correct Tempo data source is selected from the dropdown menu.
129
+ - Use the **Query** field to specify any particular traces you are interested in, or leave it empty to browse through all the available traces.
130
+ - You can adjust the time range to focus on specific periods of interest.
131
+ - Hit **Run Query** to fetch your trace data. You'll see a visual representation of your traces along with detailed information on particular spans when clicked.
132
+
133
+ #### Next Steps
134
+
135
+ - **Create Dashboards:** Beyond just exploring traces, consider creating dashboards in Grafana to monitor key performance indicators (KPIs) and metrics over time. Dashboards can be customized with various panels to display graphs, logs, and single stats that are most relevant to your application's performance and usage patterns.
136
+ - **Set Alerts:** Grafana also allows you to set up alerts based on specific thresholds. This feature can be invaluable in proactively managing your application's health by notifying you of potential issues before they impact users.
137
+ - **Iterate and Optimize:** Use the insights gained from your observability data to make informed decisions on optimizing your LLM application. This might involve refining model parameters, adjusting scaling strategies, or identifying and resolving bottlenecks.
77
138
 
78
139
 
79
140
  ### Configuration
@@ -107,7 +168,7 @@ Your input helps us grow and improve, and we're here to support you every step o
107
168
 
108
169
  Connect with the OpenLIT community and maintainers for support, discussions, and updates:
109
170
 
110
- - 🌟 If you like it, Leave a star on our [GitHub](https://github.com/open-lit/openlit/)
171
+ - 🌟 If you like it, Leave a star on our [GitHub](https://github.com/openlit/openlit/)
111
172
  - 🌍 Join our [Slack](https://join.slack.com/t/openlit/shared_invite/zt-2etnfttwg-TjP_7BZXfYg84oAukY8QRQ) Community for live interactions and questions.
112
- - 🐞 Report bugs on our [GitHub Issues](https://github.com/open-lit/openlit/issues) to help us improve OpenLIT.
173
+ - 🐞 Report bugs on our [GitHub Issues](https://github.com/openlit/openlit/issues) to help us improve OpenLIT.
113
174
  - 𝕏 Follow us on [X](https://twitter.com/openlit) for the latest updates and news.
@@ -1,4 +1,19 @@
1
- # OpenLIT: OpenTelemetry-native Observability for LLMs
1
+ <div align="center">
2
+ <img src="https://github.com/openlit/.github/blob/main/profile/assets/wide-logo-no-bg.png?raw=true" alt="OpenLIT Logo" width="30%"><h1>
3
+ OpenTelemetry Auto-Instrumentation for GenAI & LLM Applications</h1>
4
+
5
+ **[Documentation](https://docs.openlit.io/) | [Quickstart](#-getting-started) | [Python SDK](https://github.com/openlit/openlit/tree/main/sdk/python)**
6
+
7
+ [![OpenLIT](https://img.shields.io/badge/OpenLIT-orange)](https://github.com/openlit/openlit)
8
+ [![License](https://img.shields.io/github/license/openlit/openlit?label=License&logo=github&color=f80&logoColor=white)](https://github.com/openlit/openlit/blob/main/LICENSE)
9
+ [![Downloads](https://static.pepy.tech/badge/openlit/month)](https://pepy.tech/project/openlit)
10
+ [![GitHub Last Commit](https://img.shields.io/github/last-commit/openlit/openlit)](https://github.com/openlit/openlit/pulse)
11
+ [![GitHub Contributors](https://img.shields.io/github/contributors/openlit/openlit)](https://github.com/openlit/openlit/graphs/contributors)
12
+
13
+ [![Slack](https://img.shields.io/badge/Slack-4A154B?logo=slack&logoColor=white)](https://join.slack.com/t/openlit/shared_invite/zt-2etnfttwg-TjP_7BZXfYg84oAukY8QRQ)
14
+ [![X](https://img.shields.io/badge/follow-%40OpenLIT-1DA1F2?logo=x&style=social)](https://twitter.com/openlit_io)
15
+
16
+ </div>
2
17
 
3
18
  OpenLIT Python SDK is an **OpenTelemetry-native** Auto instrumentation library for monitoring LLM Applications, facilitating the integration of observability into your GenAI-driven projects. Designed with simplicity and efficiency, OpenLIT offers the ability to embed observability into your GenAI-driven projects effortlessly using just **a single line of code**.
4
19
 
@@ -40,10 +55,20 @@ This project adheres to the [Semantic Conventions](https://github.com/open-telem
40
55
  pip install openlit
41
56
  ```
42
57
 
43
- ## Quick Integration
58
+ ## 🚀 Getting Started
59
+
60
+ ## Step 1: Install OpenLIT SDK
61
+
62
+ ```bash
63
+ pip install openlit
64
+ ```
65
+
66
+ ### Step 2: Instrument your Application
67
+ Integrating OpenLIT into LLM applications is straightforward. Start monitoring your LLM Application with just **one line of code**:
44
68
 
45
69
  ```python
46
70
  import openlit
71
+
47
72
  openlit.init()
48
73
  ```
49
74
 
@@ -51,6 +76,42 @@ By default, OpenLIT directs traces and metrics straight to your console. To forw
51
76
 
52
77
  To send telemetry to OpenTelemetry backends requiring authentication, set the `otlp_headers` parameter with its desired value. Alternatively, you can configure the endpoint by setting the `OTEL_EXPORTER_OTLP_HEADERS` environment variable as recommended in the OpenTelemetry documentation.
53
78
 
79
+ #### Example
80
+
81
+ Here is how you can send telemetry from OpenLIT to Grafana Cloud
82
+
83
+ ```python
84
+ openlit.init(
85
+ otlp_endpoint="https://otlp-gateway-prod-us-east-0.grafana.net/otlp",
86
+ otlp_headers="Authorization=Basic%20<base64 encoded Instance ID and API Token>"
87
+ )
88
+ ```
89
+
90
+ Alternatively, you can set these values using the `OTEL_EXPORTER_OTLP_ENDPOINT` and `OTEL_EXPORTER_OTLP_HEADERS` environment variables
91
+
92
+ ```python
93
+ openlit.init()
94
+ ```
95
+
96
+ ```env
97
+ export OTEL_EXPORTER_OTLP_ENDPOINT = "https://otlp-gateway-prod-us-east-0.grafana.net/otlp"
98
+ export OTEL_EXPORTER_OTLP_HEADERS = "Authorization=Basic%20<base64 encoded Instance ID and API Token>"
99
+ ```
100
+
101
+ ### Step 3: Visualize and Optimize!
102
+ With the LLM Observability data now being collected and sent to your chosen OpenTelemetry backend, the next step is to visualize and analyze this data to glean insights into your application's performance and behavior, and to identify areas of improvement. To use the data in Grafana, follow these detailed instructions to explore your LLM application's Telemetry data.
103
+
104
+ - Select the **Explore** option from Grafana's sidebar.
105
+ - At the top, ensure the correct Tempo data source is selected from the dropdown menu.
106
+ - Use the **Query** field to specify any particular traces you are interested in, or leave it empty to browse through all the available traces.
107
+ - You can adjust the time range to focus on specific periods of interest.
108
+ - Hit **Run Query** to fetch your trace data. You'll see a visual representation of your traces along with detailed information on particular spans when clicked.
109
+
110
+ #### Next Steps
111
+
112
+ - **Create Dashboards:** Beyond just exploring traces, consider creating dashboards in Grafana to monitor key performance indicators (KPIs) and metrics over time. Dashboards can be customized with various panels to display graphs, logs, and single stats that are most relevant to your application's performance and usage patterns.
113
+ - **Set Alerts:** Grafana also allows you to set up alerts based on specific thresholds. This feature can be invaluable in proactively managing your application's health by notifying you of potential issues before they impact users.
114
+ - **Iterate and Optimize:** Use the insights gained from your observability data to make informed decisions on optimizing your LLM application. This might involve refining model parameters, adjusting scaling strategies, or identifying and resolving bottlenecks.
54
115
 
55
116
 
56
117
  ### Configuration
@@ -84,7 +145,7 @@ Your input helps us grow and improve, and we're here to support you every step o
84
145
 
85
146
  Connect with the OpenLIT community and maintainers for support, discussions, and updates:
86
147
 
87
- - 🌟 If you like it, Leave a star on our [GitHub](https://github.com/open-lit/openlit/)
148
+ - 🌟 If you like it, Leave a star on our [GitHub](https://github.com/openlit/openlit/)
88
149
  - 🌍 Join our [Slack](https://join.slack.com/t/openlit/shared_invite/zt-2etnfttwg-TjP_7BZXfYg84oAukY8QRQ) Community for live interactions and questions.
89
- - 🐞 Report bugs on our [GitHub Issues](https://github.com/open-lit/openlit/issues) to help us improve OpenLIT.
150
+ - 🐞 Report bugs on our [GitHub Issues](https://github.com/openlit/openlit/issues) to help us improve OpenLIT.
90
151
  - 𝕏 Follow us on [X](https://twitter.com/openlit) for the latest updates and news.
@@ -1,11 +1,11 @@
1
1
  [tool.poetry]
2
2
  name = "openlit"
3
- version = "0.0.2"
3
+ version = "0.0.3"
4
4
  description = "OpenTelemetry-native Auto instrumentation library for monitoring LLM Applications, facilitating the integration of observability into your GenAI-driven projects"
5
5
  authors = ["OpenLIT"]
6
- repository = "https://github.com/open-lit/openlit/tree/main/openlit/python"
6
+ repository = "https://github.com/openlit/openlit/tree/main/openlit/python"
7
7
  readme = "README.md"
8
- homepage = "https://github.com/open-lit/openlit/tree/main/openlit/python"
8
+ homepage = "https://github.com/openlit/openlit/tree/main/openlit/python"
9
9
  keywords = ["OpenTelemetry", "otel", "otlp","llm", "tracing", "openai", "anthropic", "claude", "cohere", "llm monitoring", "observability", "monitoring", "gpt", "Generative AI", "chatGPT"]
10
10
 
11
11
  [tool.poetry.dependencies]
@@ -124,7 +124,7 @@ def get_audio_model_cost(model, pricing_info, prompt):
124
124
 
125
125
  def fetch_pricing_info():
126
126
  """Fetches pricing information from a specified URL."""
127
- pricing_url = "https://raw.githubusercontent.com/dokulabs/doku/main/assets/pricing.json"
127
+ pricing_url = "https://raw.githubusercontent.com/openlit/openlit/main/assets/pricing.json"
128
128
  try:
129
129
  # Set a timeout of 10 seconds for both the connection and the read
130
130
  response = requests.get(pricing_url, timeout=20)
File without changes
File without changes