langfuse-prompt-library-iauro 0.1.0__tar.gz

This diff represents the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in its public registry.
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2026 Vaibhav Hopal / iauro Systems Pvt Ltd
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
@@ -0,0 +1,252 @@
+ Metadata-Version: 2.4
+ Name: langfuse-prompt-library-iauro
+ Version: 0.1.0
+ Summary: A production-ready wrapper for Langfuse prompts and tracing with LLM integrations
+ Author-email: Sunny Mane <sunny.mane@iauro.com>
+ Maintainer-email: Vaibhav Hopal <vaibhav.hopal@iauro.com>
+ License: MIT
+ Project-URL: Homepage, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library
+ Project-URL: Documentation, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library#readme
+ Project-URL: Repository, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library
+ Project-URL: Issues, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library/issues
+ Keywords: langfuse,llm,openai,anthropic,prompt-management,tracing,observability,langchain
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Requires-Python: >=3.8
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: langfuse>=3.0.0
+ Requires-Dist: openai>=1.0.0
+ Requires-Dist: python-dotenv>=1.0.0
+ Provides-Extra: anthropic
+ Requires-Dist: anthropic>=0.18.0; extra == "anthropic"
+ Provides-Extra: dev
+ Requires-Dist: pytest>=7.0.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
+ Requires-Dist: black>=23.0.0; extra == "dev"
+ Requires-Dist: flake8>=6.0.0; extra == "dev"
+ Requires-Dist: mypy>=1.0.0; extra == "dev"
+ Requires-Dist: isort>=5.12.0; extra == "dev"
+ Provides-Extra: all
+ Requires-Dist: anthropic>=0.18.0; extra == "all"
+ Dynamic: license-file
+
+ # Langfuse Prompt Library
+
+ A production-ready Python wrapper for [Langfuse](https://langfuse.com/) prompts and tracing with integrated LLM support (OpenAI, Anthropic).
+
+ ## Features
+
+ - **Prompt Management**: Fetch and manage prompts from Langfuse with built-in caching
+ - **Multi-Provider Support**: Works with OpenAI (GPT models) and Anthropic (Claude models)
+ - **Automatic Tracing**: Built-in observability and tracing for all LLM calls
+ - **Token Tracking**: Automatic token usage tracking and reporting
+ - **Retry Logic**: Configurable retry mechanism with exponential backoff
+ - **Type Safety**: Full type hints and validation
+ - **Thread-Safe Caching**: Efficient prompt caching with TTL support
+ - **Error Handling**: Comprehensive error handling and custom exceptions
+ - **Production Ready**: Designed for enterprise use with logging, metrics, and cleanup handlers
+
+ ## Installation
+
+ ```bash
+ pip install langfuse-prompt-library-iauro
+ ```
+
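+ Claude support is an optional extra (declared in the package metadata); installing with Anthropic support presumably looks like:
+
+ ```bash
+ pip install "langfuse-prompt-library-iauro[anthropic]"
+ ```
+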
+ ## Quick Start
+
+ ### Basic Usage
+
+ ```python
+ from langfuse_prompt_library import LangfuseManager
+
+ # Initialize manager (loads from environment variables)
+ lf = LangfuseManager()
+
+ # Call LLM with a prompt
+ response = lf.call_llm(
+     prompt_name="customer_support_agent",
+     user_input="How do I reset my password?",
+     prompt_label="production",
+     model="gpt-3.5-turbo"
+ )
+ ```
+
+ ### Using Claude (Anthropic)
+
+ ```python
+ # Claude models are auto-detected
+ response = lf.call_llm(
+     prompt_name="assistant",
+     user_input="Explain quantum computing",
+     model="claude-sonnet-4",
+     temperature=0.7
+ )
+ ```
+
+ ### Advanced Usage
+
+ ```python
+ from langfuse_prompt_library import LangfuseManager, LangfuseConfig
+
+ # Custom configuration
+ config = LangfuseConfig(
+     secret_key="your-secret-key",
+     public_key="your-public-key",
+     host="https://cloud.langfuse.com",
+     openai_api_key="your-openai-key",
+     enable_caching=True,
+     cache_ttl=3600,
+     request_timeout=60.0
+ )
+
+ lf = LangfuseManager(config=config)
+
+ # Fetch prompt manually
+ prompt = lf.get_prompt("my_prompt", label="production")
+ messages = lf.compile_prompt(prompt, user_input="Hello")
+
+ # Get cache statistics
+ stats = lf.get_cache_stats()
+
+ # Flush traces before shutdown
+ lf.flush()
+ ```
+
+ ## Configuration
+
+ Set these environment variables or pass them via `LangfuseConfig`:
+
+ ```bash
+ # Required
+ LANGFUSE_SECRET_KEY=your-secret-key
+ LANGFUSE_PUBLIC_KEY=your-public-key
+
+ # Optional
+ LANGFUSE_HOST=https://cloud.langfuse.com
+ OPENAI_API_KEY=your-openai-key
+ ANTHROPIC_API_KEY=your-anthropic-key
+
+ # Advanced options
+ LANGFUSE_ENABLE_CACHING=true
+ LANGFUSE_CACHE_TTL=3600
+ LANGFUSE_REQUEST_TIMEOUT=60.0
+ LANGFUSE_DEBUG=false
+ LANGFUSE_LOG_LEVEL=INFO
+ ```
+
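+ Because `python-dotenv` is a dependency, a local `.env` file is a natural home for these variables. A minimal sketch, assuming the manager reads the environment at construction time as in Quick Start:
+
+ ```python
+ from dotenv import load_dotenv
+
+ from langfuse_prompt_library import LangfuseManager
+
+ load_dotenv()           # copy LANGFUSE_* and provider keys from .env into the environment
+ lf = LangfuseManager()  # then initialize from those environment variables
+ ```
+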
+ ## API Reference
+
+ ### LangfuseManager
+
+ Main entry point for the library.
+
+ #### Methods
+
+ - `call_llm(prompt_name, user_input, ...)` - High-level method to fetch prompt and call LLM
+ - `get_prompt(name, version, label, cache)` - Fetch a prompt from Langfuse
+ - `compile_prompt(prompt, **variables)` - Compile prompt with variables
+ - `flush()` - Flush pending traces to Langfuse
+ - `get_cache_stats()` - Get cache statistics
+ - `clear_cache()` - Clear the prompt cache
+
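+ A short sketch of the cache helpers, assuming the `lf` manager from Quick Start (the structure of the stats object is not documented here, so treat it as opaque):
+
+ ```python
+ # Populate the cache, inspect it, then reset it
+ prompt = lf.get_prompt("assistant", label="production")
+
+ stats = lf.get_cache_stats()
+ print(stats)      # exact fields undocumented; print to inspect
+
+ lf.clear_cache()  # later get_prompt calls fetch fresh from Langfuse
+ ```
+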
+ ### LLMResponse
+
+ Response object containing:
+ - `content` - The generated text
+ - `model` - Model used
+ - `usage` - Token usage dict with 'input', 'output', 'total'
+ - `metadata` - Additional metadata
+ - `raw_response` - Raw API response
+
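+ For illustration, reading these fields off a Basic Usage call (a sketch assuming the `lf` manager from Quick Start; the `usage` keys follow the list above):
+
+ ```python
+ response = lf.call_llm(prompt_name="assistant", user_input="Hello")
+
+ print(response.content)  # generated text
+ print(response.model)    # model that served the call
+ usage = response.usage   # dict with 'input', 'output', 'total'
+ print(usage["input"], usage["output"], usage["total"])
+ ```
+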
+ ### Exceptions
+
+ - `LangfuseLibraryError` - Base exception
+ - `ConfigurationError` - Configuration issues
+ - `PromptNotFoundError` - Prompt not found
+ - `ProviderError` - LLM provider errors
+ - `APITimeoutError` - API timeout
+ - `RateLimitError` - Rate limit exceeded
+ - `ValidationError` - Input validation failed
+
+ ## Examples
+
+ ### Error Handling
+
+ ```python
+ from langfuse_prompt_library import (
+     LangfuseManager,
+     PromptNotFoundError,
+     ProviderError
+ )
+
+ lf = LangfuseManager()
+
+ try:
+     response = lf.call_llm(
+         prompt_name="nonexistent_prompt",
+         user_input="Hello"
+     )
+ except PromptNotFoundError as e:
+     print(f"Prompt not found: {e}")
+ except ProviderError as e:
+     print(f"Provider error: {e}")
+ ```
+
+ ### Specific Prompt Version
+
+ ```python
+ # Use specific version
+ response = lf.call_llm(
+     prompt_name="assistant",
+     user_input="Hello",
+     prompt_version=5
+ )
+
+ # Or use label
+ response = lf.call_llm(
+     prompt_name="assistant",
+     user_input="Hello",
+     prompt_label="production"
+ )
+ ```
+
+ ## Requirements
+
+ - Python >= 3.8
+ - langfuse >= 3.0.0
+ - openai >= 1.0.0
+ - python-dotenv >= 1.0.0
+ - anthropic >= 0.18.0 (optional)
+
+ ## License
+
+ MIT License - see LICENSE file for details
+
+ ## Contributing
+
+ Contributions are welcome! Please open an issue or submit a pull request.
+
+ ## Support
+
+ For issues and questions:
+ - GitLab Issues: https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library/issues
+ - Langfuse Documentation: https://langfuse.com/docs
+
+ ## Changelog
+
+ ### 0.1.0
+ - Initial public release
+ - Support for OpenAI and Anthropic
+ - Automatic tracing and observability
+ - Thread-safe caching
+ - Comprehensive error handling
@@ -0,0 +1,210 @@
+ # Langfuse Prompt Library
+
+ A production-ready Python wrapper for [Langfuse](https://langfuse.com/) prompts and tracing with integrated LLM support (OpenAI, Anthropic).
+
+ ## Features
+
+ - **Prompt Management**: Fetch and manage prompts from Langfuse with built-in caching
+ - **Multi-Provider Support**: Works with OpenAI (GPT models) and Anthropic (Claude models)
+ - **Automatic Tracing**: Built-in observability and tracing for all LLM calls
+ - **Token Tracking**: Automatic token usage tracking and reporting
+ - **Retry Logic**: Configurable retry mechanism with exponential backoff
+ - **Type Safety**: Full type hints and validation
+ - **Thread-Safe Caching**: Efficient prompt caching with TTL support
+ - **Error Handling**: Comprehensive error handling and custom exceptions
+ - **Production Ready**: Designed for enterprise use with logging, metrics, and cleanup handlers
+
+ ## Installation
+
+ ```bash
+ pip install langfuse-prompt-library-iauro
+ ```
+
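+ Claude support is an optional extra (declared in the package metadata); installing with Anthropic support presumably looks like:
+
+ ```bash
+ pip install "langfuse-prompt-library-iauro[anthropic]"
+ ```
+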
+ ## Quick Start
+
+ ### Basic Usage
+
+ ```python
+ from langfuse_prompt_library import LangfuseManager
+
+ # Initialize manager (loads from environment variables)
+ lf = LangfuseManager()
+
+ # Call LLM with a prompt
+ response = lf.call_llm(
+     prompt_name="customer_support_agent",
+     user_input="How do I reset my password?",
+     prompt_label="production",
+     model="gpt-3.5-turbo"
+ )
+ ```
+
+ ### Using Claude (Anthropic)
+
+ ```python
+ # Claude models are auto-detected
+ response = lf.call_llm(
+     prompt_name="assistant",
+     user_input="Explain quantum computing",
+     model="claude-sonnet-4",
+     temperature=0.7
+ )
+ ```
+
+ ### Advanced Usage
+
+ ```python
+ from langfuse_prompt_library import LangfuseManager, LangfuseConfig
+
+ # Custom configuration
+ config = LangfuseConfig(
+     secret_key="your-secret-key",
+     public_key="your-public-key",
+     host="https://cloud.langfuse.com",
+     openai_api_key="your-openai-key",
+     enable_caching=True,
+     cache_ttl=3600,
+     request_timeout=60.0
+ )
+
+ lf = LangfuseManager(config=config)
+
+ # Fetch prompt manually
+ prompt = lf.get_prompt("my_prompt", label="production")
+ messages = lf.compile_prompt(prompt, user_input="Hello")
+
+ # Get cache statistics
+ stats = lf.get_cache_stats()
+
+ # Flush traces before shutdown
+ lf.flush()
+ ```
+
+ ## Configuration
+
+ Set these environment variables or pass them via `LangfuseConfig`:
+
+ ```bash
+ # Required
+ LANGFUSE_SECRET_KEY=your-secret-key
+ LANGFUSE_PUBLIC_KEY=your-public-key
+
+ # Optional
+ LANGFUSE_HOST=https://cloud.langfuse.com
+ OPENAI_API_KEY=your-openai-key
+ ANTHROPIC_API_KEY=your-anthropic-key
+
+ # Advanced options
+ LANGFUSE_ENABLE_CACHING=true
+ LANGFUSE_CACHE_TTL=3600
+ LANGFUSE_REQUEST_TIMEOUT=60.0
+ LANGFUSE_DEBUG=false
+ LANGFUSE_LOG_LEVEL=INFO
+ ```
+
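+ Because `python-dotenv` is a dependency, a local `.env` file is a natural home for these variables. A minimal sketch, assuming the manager reads the environment at construction time as in Quick Start:
+
+ ```python
+ from dotenv import load_dotenv
+
+ from langfuse_prompt_library import LangfuseManager
+
+ load_dotenv()           # copy LANGFUSE_* and provider keys from .env into the environment
+ lf = LangfuseManager()  # then initialize from those environment variables
+ ```
+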
+ ## API Reference
+
+ ### LangfuseManager
+
+ Main entry point for the library.
+
+ #### Methods
+
+ - `call_llm(prompt_name, user_input, ...)` - High-level method to fetch prompt and call LLM
+ - `get_prompt(name, version, label, cache)` - Fetch a prompt from Langfuse
+ - `compile_prompt(prompt, **variables)` - Compile prompt with variables
+ - `flush()` - Flush pending traces to Langfuse
+ - `get_cache_stats()` - Get cache statistics
+ - `clear_cache()` - Clear the prompt cache
+
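+ A short sketch of the cache helpers, assuming the `lf` manager from Quick Start (the structure of the stats object is not documented here, so treat it as opaque):
+
+ ```python
+ # Populate the cache, inspect it, then reset it
+ prompt = lf.get_prompt("assistant", label="production")
+
+ stats = lf.get_cache_stats()
+ print(stats)      # exact fields undocumented; print to inspect
+
+ lf.clear_cache()  # later get_prompt calls fetch fresh from Langfuse
+ ```
+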
+ ### LLMResponse
+
+ Response object containing:
+ - `content` - The generated text
+ - `model` - Model used
+ - `usage` - Token usage dict with 'input', 'output', 'total'
+ - `metadata` - Additional metadata
+ - `raw_response` - Raw API response
+
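+ For illustration, reading these fields off a Basic Usage call (a sketch assuming the `lf` manager from Quick Start; the `usage` keys follow the list above):
+
+ ```python
+ response = lf.call_llm(prompt_name="assistant", user_input="Hello")
+
+ print(response.content)  # generated text
+ print(response.model)    # model that served the call
+ usage = response.usage   # dict with 'input', 'output', 'total'
+ print(usage["input"], usage["output"], usage["total"])
+ ```
+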
+ ### Exceptions
+
+ - `LangfuseLibraryError` - Base exception
+ - `ConfigurationError` - Configuration issues
+ - `PromptNotFoundError` - Prompt not found
+ - `ProviderError` - LLM provider errors
+ - `APITimeoutError` - API timeout
+ - `RateLimitError` - Rate limit exceeded
+ - `ValidationError` - Input validation failed
+
+ ## Examples
+
+ ### Error Handling
+
+ ```python
+ from langfuse_prompt_library import (
+     LangfuseManager,
+     PromptNotFoundError,
+     ProviderError
+ )
+
+ lf = LangfuseManager()
+
+ try:
+     response = lf.call_llm(
+         prompt_name="nonexistent_prompt",
+         user_input="Hello"
+     )
+ except PromptNotFoundError as e:
+     print(f"Prompt not found: {e}")
+ except ProviderError as e:
+     print(f"Provider error: {e}")
+ ```
+
+ ### Specific Prompt Version
+
+ ```python
+ # Use specific version
+ response = lf.call_llm(
+     prompt_name="assistant",
+     user_input="Hello",
+     prompt_version=5
+ )
+
+ # Or use label
+ response = lf.call_llm(
+     prompt_name="assistant",
+     user_input="Hello",
+     prompt_label="production"
+ )
+ ```
+
+ ## Requirements
+
+ - Python >= 3.8
+ - langfuse >= 3.0.0
+ - openai >= 1.0.0
+ - python-dotenv >= 1.0.0
+ - anthropic >= 0.18.0 (optional)
+
+ ## License
+
+ MIT License - see LICENSE file for details
+
+ ## Contributing
+
+ Contributions are welcome! Please open an issue or submit a pull request.
+
+ ## Support
+
+ For issues and questions:
+ - GitLab Issues: https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library/issues
+ - Langfuse Documentation: https://langfuse.com/docs
+
+ ## Changelog
+
+ ### 0.1.0
+ - Initial public release
+ - Support for OpenAI and Anthropic
+ - Automatic tracing and observability
+ - Thread-safe caching
+ - Comprehensive error handling
@@ -0,0 +1,89 @@
+ [build-system]
+ requires = ["setuptools>=61.0", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project]
+ name = "langfuse-prompt-library-iauro"
+ version = "0.1.0"
+ description = "A production-ready wrapper for Langfuse prompts and tracing with LLM integrations"
+ readme = "README.md"
+ requires-python = ">=3.8"
+ license = {text = "MIT"}
+ authors = [
+     {name = "Sunny Mane", email = "sunny.mane@iauro.com"}
+ ]
+ maintainers = [
+     {name = "Vaibhav Hopal", email = "vaibhav.hopal@iauro.com"}
+ ]
+ keywords = [
+     "langfuse",
+     "llm",
+     "openai",
+     "anthropic",
+     "prompt-management",
+     "tracing",
+     "observability",
+     "langchain"
+ ]
+ classifiers = [
+     "Development Status :: 4 - Beta",
+     "Intended Audience :: Developers",
+     "License :: OSI Approved :: MIT License",
+     "Programming Language :: Python :: 3",
+     "Programming Language :: Python :: 3.8",
+     "Programming Language :: Python :: 3.9",
+     "Programming Language :: Python :: 3.10",
+     "Programming Language :: Python :: 3.11",
+     "Programming Language :: Python :: 3.12",
+     "Topic :: Software Development :: Libraries :: Python Modules",
+     "Topic :: Scientific/Engineering :: Artificial Intelligence",
+ ]
+
+ dependencies = [
+     "langfuse>=3.0.0",
+     "openai>=1.0.0",
+     "python-dotenv>=1.0.0",
+ ]
+
+ [project.optional-dependencies]
+ anthropic = [
+     "anthropic>=0.18.0",
+ ]
+ dev = [
+     "pytest>=7.0.0",
+     "pytest-cov>=4.0.0",
+     "black>=23.0.0",
+     "flake8>=6.0.0",
+     "mypy>=1.0.0",
+     "isort>=5.12.0",
+ ]
+ all = [
+     "anthropic>=0.18.0",
+ ]
+
+ [project.urls]
+ Homepage = "https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library"
+ Documentation = "https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library#readme"
+ Repository = "https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library"
+ Issues = "https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library/issues"
+
+ [tool.setuptools]
+ package-dir = {"" = "src"}
+ packages = ["langfuse_prompt_library", "utils"]
+
+ [tool.setuptools.package-data]
+ langfuse_prompt_library = ["py.typed"]
+
+ [tool.black]
+ line-length = 100
+ target-version = ["py38", "py39", "py310", "py311"]
+
+ [tool.isort]
+ profile = "black"
+ line_length = 100
+
+ [tool.mypy]
+ python_version = "3.8"
+ warn_return_any = true
+ warn_unused_configs = true
+ disallow_untyped_defs = false
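+
+ # Local development with the `dev` extra above would typically use an editable
+ # install (assumed workflow, not stated in the package docs):
+ #   pip install -e ".[dev]"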
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -0,0 +1,54 @@
+ """
+ Langfuse Library - A production-ready wrapper for Langfuse prompts and tracing.
+
+ This library provides a robust, enterprise-grade interface for:
+ - Fetching and managing prompts from Langfuse with caching
+ - Calling LLMs with automatic tracing and retry logic
+ - Token tracking and observability
+ - Error handling and validation
+ - Performance metrics
+
+ Example:
+     >>> from langfuse_prompt_library import LangfuseManager
+     >>>
+     >>> lf = LangfuseManager()
+     >>> response = lf.call_llm(
+     ...     prompt_name="customer_support_agent",
+     ...     user_input="How do I reset my password?",
+     ...     prompt_label="production"
+     ... )
+     >>> print(response.content)
+ """
+
+ from .manager import LangfuseManager
+ from .models import LLMResponse
+ from .config import LangfuseConfig
+ from utils.logger import get_logger
+ from .exceptions import (
+     LangfuseLibraryError,
+     ConfigurationError,
+     PromptNotFoundError,
+     ProviderError,
+     APITimeoutError,
+     RateLimitError,
+     CacheError,
+     ValidationError,
+     TracingError
+ )
+
+ __version__ = "0.1.0"
+ __all__ = [
+     "LangfuseManager",
+     "LLMResponse",
+     "LangfuseConfig",
+     "get_logger",
+     "LangfuseLibraryError",
+     "ConfigurationError",
+     "PromptNotFoundError",
+     "ProviderError",
+     "APITimeoutError",
+     "RateLimitError",
+     "CacheError",
+     "ValidationError",
+     "TracingError"
+ ]