langfuse-prompt-library-iauro 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- langfuse_prompt_library/__init__.py +54 -0
- langfuse_prompt_library/config.py +153 -0
- langfuse_prompt_library/exceptions.py +95 -0
- langfuse_prompt_library/manager.py +663 -0
- langfuse_prompt_library/models.py +42 -0
- langfuse_prompt_library_iauro-0.1.0.dist-info/METADATA +252 -0
- langfuse_prompt_library_iauro-0.1.0.dist-info/RECORD +13 -0
- langfuse_prompt_library_iauro-0.1.0.dist-info/WHEEL +5 -0
- langfuse_prompt_library_iauro-0.1.0.dist-info/licenses/LICENSE +21 -0
- langfuse_prompt_library_iauro-0.1.0.dist-info/top_level.txt +2 -0
- utils/__init__.py +1 -0
- utils/logger.py +122 -0
- utils/utility.py +302 -0
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: langfuse-prompt-library-iauro
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: A production-ready wrapper for Langfuse prompts and tracing with LLM integrations
|
|
5
|
+
Author-email: Sunny Mane <sunny.mane@iauro.com>
|
|
6
|
+
Maintainer-email: Vaibhav Hopal <vaibhav.hopal@iauro.com>
|
|
7
|
+
License: MIT
|
|
8
|
+
Project-URL: Homepage, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library
|
|
9
|
+
Project-URL: Documentation, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library#readme
|
|
10
|
+
Project-URL: Repository, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library
|
|
11
|
+
Project-URL: Issues, https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library/issues
|
|
12
|
+
Keywords: langfuse,llm,openai,anthropic,prompt-management,tracing,observability,langchain
|
|
13
|
+
Classifier: Development Status :: 4 - Beta
|
|
14
|
+
Classifier: Intended Audience :: Developers
|
|
15
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
16
|
+
Classifier: Programming Language :: Python :: 3
|
|
17
|
+
Classifier: Programming Language :: Python :: 3.8
|
|
18
|
+
Classifier: Programming Language :: Python :: 3.9
|
|
19
|
+
Classifier: Programming Language :: Python :: 3.10
|
|
20
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
21
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
22
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
23
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
24
|
+
Requires-Python: >=3.8
|
|
25
|
+
Description-Content-Type: text/markdown
|
|
26
|
+
License-File: LICENSE
|
|
27
|
+
Requires-Dist: langfuse>=3.0.0
|
|
28
|
+
Requires-Dist: openai>=1.0.0
|
|
29
|
+
Requires-Dist: python-dotenv>=1.0.0
|
|
30
|
+
Provides-Extra: anthropic
|
|
31
|
+
Requires-Dist: anthropic>=0.18.0; extra == "anthropic"
|
|
32
|
+
Provides-Extra: dev
|
|
33
|
+
Requires-Dist: pytest>=7.0.0; extra == "dev"
|
|
34
|
+
Requires-Dist: pytest-cov>=4.0.0; extra == "dev"
|
|
35
|
+
Requires-Dist: black>=23.0.0; extra == "dev"
|
|
36
|
+
Requires-Dist: flake8>=6.0.0; extra == "dev"
|
|
37
|
+
Requires-Dist: mypy>=1.0.0; extra == "dev"
|
|
38
|
+
Requires-Dist: isort>=5.12.0; extra == "dev"
|
|
39
|
+
Provides-Extra: all
|
|
40
|
+
Requires-Dist: anthropic>=0.18.0; extra == "all"
|
|
41
|
+
Dynamic: license-file
|
|
42
|
+
|
|
43
|
+
# Langfuse Prompt Library
|
|
44
|
+
|
|
45
|
+
A production-ready Python wrapper for [Langfuse](https://langfuse.com/) prompts and tracing with integrated LLM support (OpenAI, Anthropic).
|
|
46
|
+
|
|
47
|
+
## Features
|
|
48
|
+
|
|
49
|
+
- **Prompt Management**: Fetch and manage prompts from Langfuse with built-in caching
|
|
50
|
+
- **Multi-Provider Support**: Works with OpenAI (GPT models) and Anthropic (Claude models)
|
|
51
|
+
- **Automatic Tracing**: Built-in observability and tracing for all LLM calls
|
|
52
|
+
- **Token Tracking**: Automatic token usage tracking and reporting
|
|
53
|
+
- **Retry Logic**: Configurable retry mechanism with exponential backoff
|
|
54
|
+
- **Type Safety**: Full type hints and validation
|
|
55
|
+
- **Thread-Safe Caching**: Efficient prompt caching with TTL support
|
|
56
|
+
- **Error Handling**: Comprehensive error handling and custom exceptions
|
|
57
|
+
- **Production Ready**: Designed for enterprise use with logging, metrics, and cleanup handlers
|
|
58
|
+
|
|
59
|
+
## Installation
|
|
60
|
+
|
|
61
|
+
```bash
|
|
62
|
+
pip install langfuse-prompt-library-iauro
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
## Quick Start
|
|
66
|
+
|
|
67
|
+
### Basic Usage
|
|
68
|
+
|
|
69
|
+
```python
|
|
70
|
+
from langfuse_prompt_library import LangfuseManager
|
|
71
|
+
|
|
72
|
+
# Initialize manager (loads from environment variables)
|
|
73
|
+
lf = LangfuseManager()
|
|
74
|
+
|
|
75
|
+
# Call LLM with a prompt
|
|
76
|
+
response = lf.call_llm(
|
|
77
|
+
prompt_name="customer_support_agent",
|
|
78
|
+
user_input="How do I reset my password?",
|
|
79
|
+
prompt_label="production",
|
|
80
|
+
model="gpt-3.5-turbo"
|
|
81
|
+
)
|
|
82
|
+
```
|
|
83
|
+
|
|
84
|
+
### Using Claude (Anthropic)
|
|
85
|
+
|
|
86
|
+
```python
|
|
87
|
+
# Claude models are auto-detected
|
|
88
|
+
response = lf.call_llm(
|
|
89
|
+
prompt_name="assistant",
|
|
90
|
+
user_input="Explain quantum computing",
|
|
91
|
+
model="claude-sonnet-4",
|
|
92
|
+
temperature=0.7
|
|
93
|
+
)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
### Advanced Usage
|
|
97
|
+
|
|
98
|
+
```python
|
|
99
|
+
from langfuse_prompt_library import LangfuseManager, LangfuseConfig
|
|
100
|
+
|
|
101
|
+
# Custom configuration
|
|
102
|
+
config = LangfuseConfig(
|
|
103
|
+
secret_key="your-secret-key",
|
|
104
|
+
public_key="your-public-key",
|
|
105
|
+
host="https://cloud.langfuse.com",
|
|
106
|
+
openai_api_key="your-openai-key",
|
|
107
|
+
enable_caching=True,
|
|
108
|
+
cache_ttl=3600,
|
|
109
|
+
request_timeout=60.0
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
lf = LangfuseManager(config=config)
|
|
113
|
+
|
|
114
|
+
# Fetch prompt manually
|
|
115
|
+
prompt = lf.get_prompt("my_prompt", label="production")
|
|
116
|
+
messages = lf.compile_prompt(prompt, user_input="Hello")
|
|
117
|
+
|
|
118
|
+
# Get cache statistics
|
|
119
|
+
stats = lf.get_cache_stats()
|
|
120
|
+
|
|
121
|
+
# Flush traces before shutdown
|
|
122
|
+
lf.flush()
|
|
123
|
+
```
|
|
124
|
+
|
|
125
|
+
## Configuration
|
|
126
|
+
|
|
127
|
+
Set these environment variables or pass them via `LangfuseConfig`:
|
|
128
|
+
|
|
129
|
+
```bash
|
|
130
|
+
# Required
|
|
131
|
+
LANGFUSE_SECRET_KEY=your-secret-key
|
|
132
|
+
LANGFUSE_PUBLIC_KEY=your-public-key
|
|
133
|
+
|
|
134
|
+
# Optional
|
|
135
|
+
LANGFUSE_HOST=https://cloud.langfuse.com
|
|
136
|
+
OPENAI_API_KEY=your-openai-key
|
|
137
|
+
ANTHROPIC_API_KEY=your-anthropic-key
|
|
138
|
+
|
|
139
|
+
# Advanced options
|
|
140
|
+
LANGFUSE_ENABLE_CACHING=true
|
|
141
|
+
LANGFUSE_CACHE_TTL=3600
|
|
142
|
+
LANGFUSE_REQUEST_TIMEOUT=60.0
|
|
143
|
+
LANGFUSE_DEBUG=false
|
|
144
|
+
LANGFUSE_LOG_LEVEL=INFO
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
## API Reference
|
|
148
|
+
|
|
149
|
+
### LangfuseManager
|
|
150
|
+
|
|
151
|
+
Main entry point for the library.
|
|
152
|
+
|
|
153
|
+
#### Methods
|
|
154
|
+
|
|
155
|
+
- `call_llm(prompt_name, user_input, ...)` - High-level method to fetch prompt and call LLM
|
|
156
|
+
- `get_prompt(name, version, label, cache)` - Fetch a prompt from Langfuse
|
|
157
|
+
- `compile_prompt(prompt, **variables)` - Compile prompt with variables
|
|
158
|
+
- `flush()` - Flush pending traces to Langfuse
|
|
159
|
+
- `get_cache_stats()` - Get cache statistics
|
|
160
|
+
- `clear_cache()` - Clear the prompt cache
|
|
161
|
+
|
|
162
|
+
### LLMResponse
|
|
163
|
+
|
|
164
|
+
Response object containing:
|
|
165
|
+
- `content` - The generated text
|
|
166
|
+
- `model` - Model used
|
|
167
|
+
- `usage` - Token usage dict with 'input', 'output', 'total'
|
|
168
|
+
- `metadata` - Additional metadata
|
|
169
|
+
- `raw_response` - Raw API response
|
|
170
|
+
|
|
171
|
+
### Exceptions
|
|
172
|
+
|
|
173
|
+
- `LangfuseLibraryError` - Base exception
|
|
174
|
+
- `ConfigurationError` - Configuration issues
|
|
175
|
+
- `PromptNotFoundError` - Prompt not found
|
|
176
|
+
- `ProviderError` - LLM provider errors
|
|
177
|
+
- `APITimeoutError` - API timeout
|
|
178
|
+
- `RateLimitError` - Rate limit exceeded
|
|
179
|
+
- `ValidationError` - Input validation failed
|
|
180
|
+
|
|
181
|
+
## Examples
|
|
182
|
+
|
|
183
|
+
### Error Handling
|
|
184
|
+
|
|
185
|
+
```python
|
|
186
|
+
from langfuse_prompt_library import (
|
|
187
|
+
LangfuseManager,
|
|
188
|
+
PromptNotFoundError,
|
|
189
|
+
ProviderError
|
|
190
|
+
)
|
|
191
|
+
|
|
192
|
+
lf = LangfuseManager()
|
|
193
|
+
|
|
194
|
+
try:
|
|
195
|
+
response = lf.call_llm(
|
|
196
|
+
prompt_name="nonexistent_prompt",
|
|
197
|
+
user_input="Hello"
|
|
198
|
+
)
|
|
199
|
+
except PromptNotFoundError as e:
|
|
200
|
+
print(f"Prompt not found: {e}")
|
|
201
|
+
except ProviderError as e:
|
|
202
|
+
print(f"Provider error: {e}")
|
|
203
|
+
```
|
|
204
|
+
|
|
205
|
+
### Specific Prompt Version
|
|
206
|
+
|
|
207
|
+
```python
|
|
208
|
+
# Use specific version
|
|
209
|
+
response = lf.call_llm(
|
|
210
|
+
prompt_name="assistant",
|
|
211
|
+
user_input="Hello",
|
|
212
|
+
prompt_version=5
|
|
213
|
+
)
|
|
214
|
+
|
|
215
|
+
# Or use label
|
|
216
|
+
response = lf.call_llm(
|
|
217
|
+
prompt_name="assistant",
|
|
218
|
+
user_input="Hello",
|
|
219
|
+
prompt_label="production"
|
|
220
|
+
)
|
|
221
|
+
```
|
|
222
|
+
|
|
223
|
+
## Requirements
|
|
224
|
+
|
|
225
|
+
- Python >= 3.8
|
|
226
|
+
- langfuse >= 3.0.0
|
|
227
|
+
- openai >= 1.0.0
|
|
228
|
+
- python-dotenv >= 1.0.0
|
|
229
|
+
- anthropic >= 0.18.0 (optional)
|
|
230
|
+
|
|
231
|
+
## License
|
|
232
|
+
|
|
233
|
+
MIT License - see LICENSE file for details
|
|
234
|
+
|
|
235
|
+
## Contributing
|
|
236
|
+
|
|
237
|
+
Contributions are welcome! Please open an issue or submit a pull request.
|
|
238
|
+
|
|
239
|
+
## Support
|
|
240
|
+
|
|
241
|
+
For issues and questions:
|
|
242
|
+
- GitLab Issues: https://gitlab.iauro.co/hpe/backend/langfuse-prompt-library/issues
|
|
243
|
+
- Langfuse Documentation: https://langfuse.com/docs
|
|
244
|
+
|
|
245
|
+
## Changelog
|
|
246
|
+
|
|
247
|
+
### 0.1.0
|
|
248
|
+
- Initial public release
|
|
249
|
+
- Support for OpenAI and Anthropic
|
|
250
|
+
- Automatic tracing and observability
|
|
251
|
+
- Thread-safe caching
|
|
252
|
+
- Comprehensive error handling
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
langfuse_prompt_library/__init__.py,sha256=7MRsJahhfRIaNyagoL2g8IFiW_55uCFNmgD7dIVz02Q,1364
|
|
2
|
+
langfuse_prompt_library/config.py,sha256=9PqSXTrF2XQrFm38ElCdTLUzVD-t2QDykPGCEE2ZX6c,6435
|
|
3
|
+
langfuse_prompt_library/exceptions.py,sha256=qUbv7qb4smFmYVFrISLMSiqQcTXDUl2nRB6z7FkyYR0,2680
|
|
4
|
+
langfuse_prompt_library/manager.py,sha256=CESylN25kPHoQWHdxIbS-q27xcH7S8ZStN27-c_n56k,22316
|
|
5
|
+
langfuse_prompt_library/models.py,sha256=Cp45TCwMfTVKtqHToolLuNnaQvZkZIbjcE4aigOrFcQ,1131
|
|
6
|
+
langfuse_prompt_library_iauro-0.1.0.dist-info/licenses/LICENSE,sha256=WkTQ1IKE1-kPoGHfolOIMpJ04k-bBVxY5adbmMbx-X8,1094
|
|
7
|
+
utils/__init__.py,sha256=7Q3BxyXETkt3tm5trhuLTyL8PoECOK0QiK-0KUVAR2Q,16
|
|
8
|
+
utils/logger.py,sha256=AuC3vLI_PrW-mWl7ldPmldabUXvoKczIEwy3wBS3dy8,4021
|
|
9
|
+
utils/utility.py,sha256=E55fPbLza9fZdXTVZmOHYQl68AxRFDP-UmRLY6w_ku4,8891
|
|
10
|
+
langfuse_prompt_library_iauro-0.1.0.dist-info/METADATA,sha256=as3VSHBUKuFjbBl10gUoMvLU79PlLPVDf6LQ5b-mELg,6927
|
|
11
|
+
langfuse_prompt_library_iauro-0.1.0.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
|
|
12
|
+
langfuse_prompt_library_iauro-0.1.0.dist-info/top_level.txt,sha256=n-mE7H4INCBU0VXyF3jCiOHMNqvkfbHuzOHsjF7_IhQ,30
|
|
13
|
+
langfuse_prompt_library_iauro-0.1.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Vaibhav Hopal / iauro Systems Pvt Ltd
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Utils package
|
utils/logger.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Logging utilities for Langfuse library.
|
|
3
|
+
|
|
4
|
+
Provides structured logging with appropriate log levels and formatting.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import logging
|
|
8
|
+
import sys
|
|
9
|
+
from typing import Optional
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class LangfuseLogger:
    """Structured logger for Langfuse library operations.

    Wraps a stdlib ``logging.Logger`` with a single stdout handler and
    ``message | key=value | key2=value2`` context formatting.
    """

    def __init__(self, name: str = "langfuse_prompt_library", level: int = logging.INFO):
        """Initialize logger.

        Args:
            name: Logger name
            level: Logging level (default: INFO)
        """
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)

        # Remove existing handlers to avoid duplicate output when the
        # same logger name is initialized more than once.
        self.logger.handlers.clear()

        # Console handler on stdout with timestamped formatting.
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(level)
        handler.setFormatter(
            logging.Formatter(
                fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
                datefmt="%Y-%m-%d %H:%M:%S",
            )
        )
        self.logger.addHandler(handler)
        # Don't bubble records up to the root logger (would double-emit).
        self.logger.propagate = False

    @staticmethod
    def _format(message: str, kwargs: dict) -> str:
        """Append ``key=value`` context pairs to *message*, ' | '-separated.

        Centralizes the formatting previously duplicated across all five
        level methods.
        """
        if not kwargs:
            return message
        extra_info = " | ".join(f"{k}={v}" for k, v in kwargs.items())
        return f"{message} | {extra_info}"

    def debug(self, message: str, **kwargs) -> None:
        """Log debug message.

        Args:
            message: Log message
            **kwargs: Additional context as key-value pairs
        """
        self.logger.debug(self._format(message, kwargs))

    def info(self, message: str, **kwargs) -> None:
        """Log info message.

        Args:
            message: Log message
            **kwargs: Additional context as key-value pairs
        """
        self.logger.info(self._format(message, kwargs))

    def warning(self, message: str, **kwargs) -> None:
        """Log warning message.

        Args:
            message: Log message
            **kwargs: Additional context as key-value pairs
        """
        self.logger.warning(self._format(message, kwargs))

    def error(self, message: str, exc_info: bool = False, **kwargs) -> None:
        """Log error message.

        Args:
            message: Log message
            exc_info: Include exception information
            **kwargs: Additional context as key-value pairs
        """
        self.logger.error(self._format(message, kwargs), exc_info=exc_info)

    def critical(self, message: str, exc_info: bool = False, **kwargs) -> None:
        """Log critical message.

        Args:
            message: Log message
            exc_info: Include exception information
            **kwargs: Additional context as key-value pairs
        """
        self.logger.critical(self._format(message, kwargs), exc_info=exc_info)

    def set_level(self, level: int) -> None:
        """Change logging level on the logger and all attached handlers.

        Args:
            level: New logging level (e.g., logging.DEBUG, logging.INFO)
        """
        self.logger.setLevel(level)
        for handler in self.logger.handlers:
            handler.setLevel(level)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def get_logger(name: str = "langfuse_prompt_library", level: Optional[int] = None) -> LangfuseLogger:
    """Get or create a logger instance.

    Args:
        name: Logger name
        level: Optional logging level; falls back to ``logging.INFO``

    Returns:
        LangfuseLogger instance
    """
    effective_level = logging.INFO if level is None else level
    return LangfuseLogger(name, effective_level)
|