lmnr 0.3.7__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lmnr/__init__.py +2 -6
- lmnr/sdk/decorators.py +55 -267
- lmnr/sdk/laminar.py +380 -0
- lmnr/sdk/log.py +39 -0
- lmnr/sdk/utils.py +10 -11
- lmnr-0.4.0.dist-info/METADATA +151 -0
- lmnr-0.4.0.dist-info/RECORD +12 -0
- lmnr/sdk/client.py +0 -161
- lmnr/sdk/collector.py +0 -177
- lmnr/sdk/constants.py +0 -1
- lmnr/sdk/context.py +0 -483
- lmnr/sdk/interface.py +0 -316
- lmnr/sdk/providers/__init__.py +0 -2
- lmnr/sdk/providers/base.py +0 -28
- lmnr/sdk/providers/fallback.py +0 -154
- lmnr/sdk/providers/openai.py +0 -159
- lmnr/sdk/providers/utils.py +0 -33
- lmnr/sdk/tracing_types.py +0 -210
- lmnr/semantic_conventions/__init__.py +0 -0
- lmnr/semantic_conventions/gen_ai_spans.py +0 -48
- lmnr-0.3.7.dist-info/METADATA +0 -266
- lmnr-0.3.7.dist-info/RECORD +0 -23
- {lmnr-0.3.7.dist-info → lmnr-0.4.0.dist-info}/LICENSE +0 -0
- {lmnr-0.3.7.dist-info → lmnr-0.4.0.dist-info}/WHEEL +0 -0
- {lmnr-0.3.7.dist-info → lmnr-0.4.0.dist-info}/entry_points.txt +0 -0
lmnr/sdk/laminar.py
ADDED
@@ -0,0 +1,380 @@
+from opentelemetry import context
+from opentelemetry.trace import (
+    INVALID_SPAN,
+    get_current_span,
+    set_span_in_context,
+    Span,
+)
+from opentelemetry.semconv_ai import SpanAttributes
+from opentelemetry.util.types import AttributeValue
+from traceloop.sdk import Traceloop
+from traceloop.sdk.tracing import get_tracer
+
+from pydantic.alias_generators import to_snake
+from typing import Any, Optional, Tuple, Union
+
+import copy
+import datetime
+import dotenv
+import json
+import logging
+import os
+import requests
+import uuid
+
+from .log import VerboseColorfulFormatter
+
+from .types import (
+    PipelineRunError,
+    PipelineRunResponse,
+    NodeInput,
+    PipelineRunRequest,
+)
+
+
+class Laminar:
+    __base_url: str = "https://api.lmnr.ai"
+    __project_api_key: Optional[str] = None
+    __env: dict[str, str] = {}
+    __initialized: bool = False
+
+    @classmethod
+    def initialize(
+        cls,
+        project_api_key: Optional[str] = None,
+        env: dict[str, str] = {},
+        base_url: Optional[str] = None,
+    ):
+        """Initialize Laminar context across the application.
+        This method must be called before using any other Laminar methods or
+        decorators.
+
+        Args:
+            project_api_key (Optional[str], optional): Laminar project api key.
+                            You can generate one by going to the projects
+                            settings page on the Laminar dashboard.
+                            If not specified, it will try to read from the
+                            LMNR_PROJECT_API_KEY environment variable
+                            in os.environ or in .env file.
+                            Defaults to None.
+            env (dict[str, str], optional): Default environment passed to
+                            `run` and `evaluate_event` requests, unless
+                            overriden at request time. Usually, model
+                            provider keys are stored here.
+                            Defaults to {}.
+            base_url (Optional[str], optional): Url of Laminar endpoint,
+                            or the customopen telemetry ingester.
+                            If not specified, defaults to
+                            https://api.lmnr.ai.
+                            For locally hosted Laminar, default setting
+                            must be http://localhost:8000
+                            Defaults to None.
+
+        Raises:
+            ValueError: If project API key is not set
+        """
+        cls.__project_api_key = project_api_key or os.environ.get(
+            "LMNR_PROJECT_API_KEY"
+        )
+        if not project_api_key:
+            dotenv_path = dotenv.find_dotenv(usecwd=True)
+            cls.__project_api_key = dotenv.get_key(
+                dotenv_path=dotenv_path, key_to_get="LMNR_PROJECT_API_KEY"
+            )
+        if not cls.__project_api_key:
+            raise ValueError(
+                "Please initialize the Laminar object with"
+                " your project API key or set the LMNR_PROJECT_API_KEY"
+                " environment variable in your environment or .env file"
+            )
+        if base_url is not None:
+            cls.__base_url = base_url
+        cls.__env = env
+        cls.__initialized = True
+        cls._initialize_logger()
+        Traceloop.init(
+            api_endpoint=cls.__base_url,
+            api_key=cls.__project_api_key,
+            disable_batch=True,
+        )
+
+    @classmethod
+    def is_initialized(cls):
+        """Check if Laminar is initialized. A utility to make sure other
+        methods are called after initialization.
+
+        Returns:
+            bool: True if Laminar is initialized, False otherwise
+        """
+        return cls.__initialized
+
+    @classmethod
+    def _initialize_logger(cls):
+        cls.__logger = logging.getLogger(__name__)
+        console_log_handler = logging.StreamHandler()
+        console_log_handler.setFormatter(VerboseColorfulFormatter())
+        cls.__logger.addHandler(console_log_handler)
+
+    @classmethod
+    def run(
+        cls,
+        pipeline: str,
+        inputs: dict[str, NodeInput],
+        env: dict[str, str] = {},
+        metadata: dict[str, str] = {},
+        parent_span_id: Optional[uuid.UUID] = None,
+        trace_id: Optional[uuid.UUID] = None,
+    ) -> PipelineRunResponse:
+        """Runs the pipeline with the given inputs
+
+        Args:
+            pipeline (str): name of the Laminar pipeline.
+                            The pipeline must have a target version set.
+            inputs (dict[str, NodeInput]):
+                            inputs to the endpoint's target pipeline.
+                            Keys in the dictionary must match input node names
+            env (dict[str, str], optional):
+                            Environment variables for the pipeline execution.
+                            Defaults to {}.
+            metadata (dict[str, str], optional):
+                            any custom metadata to be stored
+                            with execution trace. Defaults to {}.
+            parent_span_id (Optional[uuid.UUID], optional):
+                            parent span id for the resulting span.
+                            Defaults to None.
+            trace_id (Optional[uuid.UUID], optional):
+                            trace id for the resulting trace.
+                            Defaults to None.
+
+        Returns:
+            PipelineRunResponse: response object containing the outputs
+
+        Raises:
+            ValueError: if project API key is not set
+            PipelineRunError: if the endpoint run fails
+        """
+        if cls.__project_api_key is None:
+            raise ValueError(
+                "Please initialize the Laminar object with your project "
+                "API key or set the LMNR_PROJECT_API_KEY environment variable"
+            )
+        try:
+            current_span = get_current_span()
+            if current_span != INVALID_SPAN:
+                parent_span_id = parent_span_id or uuid.UUID(
+                    int=current_span.get_span_context().span_id
+                )
+                trace_id = trace_id or uuid.UUID(
+                    int=current_span.get_span_context().trace_id
+                )
+            request = PipelineRunRequest(
+                inputs=inputs,
+                pipeline=pipeline,
+                env=env,
+                metadata=metadata,
+                parent_span_id=parent_span_id,
+                trace_id=trace_id,
+            )
+        except Exception as e:
+            raise ValueError(f"Invalid request: {e}")
+
+        response = requests.post(
+            cls.__base_url + "/v1/pipeline/run",
+            data=json.dumps(request.to_dict()),
+            headers=cls._headers(),
+        )
+        if response.status_code != 200:
+            raise PipelineRunError(response)
+        try:
+            resp_json = response.json()
+            keys = list(resp_json.keys())
+            for key in keys:
+                value = resp_json[key]
+                del resp_json[key]
+                resp_json[to_snake(key)] = value
+            return PipelineRunResponse(**resp_json)
+        except Exception:
+            raise PipelineRunError(response)
+
+    @classmethod
+    def event(
+        cls,
+        name: str,
+        value: AttributeValue,
+        timestamp: Optional[Union[datetime.datetime, int]] = None,
+    ):
+        """Associate an event with the current span
+
+        Args:
+            name (str): event name
+            value (AttributeValue): event value. Must be a primitive type
+                            or a sequence of values of the same primitive type
+            timestamp (Optional[Union[datetime.datetime, int]], optional):
+                            If int, must be epoch nanoseconds. If not
+                            specified, relies on the underlying OpenTelemetry
+                            implementation. Defaults to None.
+        """
+        if timestamp and isinstance(timestamp, datetime.datetime):
+            timestamp = int(timestamp.timestamp() * 1e9)
+
+        event = {
+            "lmnr.event.type": "default",
+            "lmnr.event.value": value,
+        }
+
+        current_span = get_current_span()
+        if current_span == INVALID_SPAN:
+            cls.__logger.warning(
+                "`Laminar().event()` called outside of span context. "
+                f"Event '{name}' will not be recorded in the trace. "
+                "Make sure to annotate the function with a decorator"
+            )
+            return
+
+        current_span.add_event(name, event, timestamp)
+
+    @classmethod
+    def evaluate_event(
+        cls,
+        name: str,
+        evaluator: str,
+        data: dict[str, AttributeValue],
+        env: Optional[dict[str, str]] = {},
+        timestamp: Optional[Union[datetime.datetime, int]] = None,
+    ):
+        """Send an event for evaluation to the Laminar backend
+
+        Args:
+            name (str): name of the event
+            evaluator (str): name of the pipeline that evaluates the event.
+                            The pipeline must have a target version set.
+            data (dict[str, AttributeValue]): map from input node name to
+                            its value in the evaluator pipeline
+            env (dict[str, str], optional): environment variables required
+                            to run the pipeline. Defaults to {}.
+            timestamp (Optional[Union[datetime.datetime, int]], optional):
+                            If int, must be epoch nanoseconds.
+                            If not specified, relies on the underlying
+                            OpenTelemetry implementation. Defaults to None.
+        """
+        if timestamp and isinstance(timestamp, datetime.datetime):
+            timestamp = int(timestamp.timestamp() * 1e9)
+        event = {
+            "lmnr.event.type": "evaluate",
+            "lmnr.event.evaluator": evaluator,
+            "lmnr.event.data": json.dumps(data),
+            "lmnr.event.env": json.dumps(env if env is not None else cls.__env),
+        }
+        current_span = get_current_span()
+        if current_span == INVALID_SPAN:
+            cls.__logger.warning(
+                "`Laminar().evaluate_event()` called outside of span context."
+                f"Event '{name}' will not be recorded in the trace. "
+                "Make sure to annotate the function with a decorator"
+            )
+            return
+
+        current_span.add_event(name, event)
+
+    @classmethod
+    def start_span(
+        cls,
+        name: str,
+        input: Any = None,
+    ) -> Tuple[Span, object]:
+        """Start a new span with the given name. Useful for manual
+        instrumentation.
+
+        Args:
+            name (str): name of the span
+            input (Any, optional): input to the span. Will be sent as an
+                attribute, so must be json serializable. Defaults to None.
+
+        Returns:
+            Tuple[Span, object]: Span - the started span, object -
+                context token
+                that must be passed to `end_span` to end the span.
+
+        """
+        with get_tracer() as tracer:
+            span = tracer.start_span(name)
+            ctx = set_span_in_context(span)
+            token = context.attach(ctx)
+            span.set_attribute(SpanAttributes.TRACELOOP_ENTITY_NAME, name)
+            if input is not None:
+                span.set_attribute(
+                    SpanAttributes.TRACELOOP_ENTITY_INPUT, json.dumps({"input": input})
+                )
+            return (span, token)
+
+    @classmethod
+    def end_span(cls, span: Span, token: object, output: Any = None):
+        """End the span started with `start_span`
+
+        Args:
+            span (Span): span returned by `start_span`
+            token (object): context token returned by `start_span`
+            output (Any, optional): output of the span. Will be sent as an
+                attribute, so must be json serializable. Defaults to None.
+        """
+        if output is not None:
+            span.set_attribute(
+                SpanAttributes.TRACELOOP_ENTITY_OUTPUT, json.dumps({"output": output})
+            )
+        span.end()
+        context.detach(token)
+
+    @classmethod
+    def set_session(
+        cls,
+        session_id: Optional[str] = None,
+        user_id: Optional[str] = None,
+    ):
+        """Set the session and user id for the current span and the context
+        (i.e. any children spans created from the current span in the current
+        thread).
+
+        Args:
+            session_id (Optional[str], optional): Custom session id.
+                            Useful to debug and group long-running
+                            sessions/conversations.
+                            Defaults to None.
+            user_id (Optional[str], optional): Custom user id.
+                            Useful for grouping spans or traces by user.
+                            Defaults to None.
+        """
+        current_span = get_current_span()
+        if current_span != INVALID_SPAN:
+            cls.__logger.debug(
+                "Laminar().set_session() called inside a span context. Setting"
+                " it manually in the current span."
+            )
+            if session_id is not None:
+                current_span.set_attribute(
+                    "traceloop.association.properties.session_id", session_id
+                )
+            if user_id is not None:
+                current_span.set_attribute(
+                    "traceloop.association.properties.user_id", user_id
+                )
+        association_properties = {}
+        if session_id is not None:
+            association_properties["session_id"] = session_id
+        if user_id is not None:
+            association_properties["user_id"] = user_id
+        Traceloop.set_association_properties(association_properties)
+
+    @classmethod
+    def clear_session(cls):
+        """Clear the session and user id from the context"""
+        props: dict = copy.copy(context.get_value("association_properties"))
+        props.pop("session_id", None)
+        props.pop("user_id", None)
+        Traceloop.set_association_properties(props)
+
+    @classmethod
+    def _headers(cls):
+        return {
+            "Authorization": "Bearer " + cls.__project_api_key,
+            "Content-Type": "application/json",
+        }
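The `laminar.py` file above adds a manual-instrumentation API (`start_span`, `end_span`) alongside `event`. A minimal usage sketch, not part of the package, assuming `initialize` has been called with a valid project API key; the `summarize` function, its body, and the event name are made up for illustration:

```python
from lmnr import Laminar as L

L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")


def summarize(text: str) -> str:
    # start_span returns the span plus a context token; both must be
    # handed back to end_span so the span is closed and the context detached.
    span, token = L.start_span("summarize", input={"text": text})
    summary = text[:100]  # placeholder for the real work
    # record a plain event on the span that is now current in this context
    L.event("summary_generated", True)
    L.end_span(span, token, output={"summary": summary})
    return summary
```

Because `start_span` attaches the new span to the current context, the `event` call above lands on that span instead of triggering the "called outside of span context" warning.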
lmnr/sdk/log.py
ADDED
@@ -0,0 +1,39 @@
+import logging
+
+
+class CustomFormatter(logging.Formatter):
+    grey = "\x1b[38;20m"
+    green = "\x1b[32;20m"
+    yellow = "\x1b[33;20m"
+    red = "\x1b[31;20m"
+    bold_red = "\x1b[31;1m"
+    reset = "\x1b[0m"
+    fmt = "%(asctime)s::%(name)s::%(levelname)s: %(message)s (%(filename)s:%(lineno)d)"
+
+    FORMATS = {
+        logging.DEBUG: grey + fmt + reset,
+        logging.INFO: green + fmt + reset,
+        logging.WARNING: yellow + fmt + reset,
+        logging.ERROR: red + fmt + reset,
+        logging.CRITICAL: bold_red + fmt + reset,
+    }
+
+    def format(self, record: logging.LogRecord):
+        log_fmt = self.FORMATS.get(record.levelno)
+        formatter = logging.Formatter(log_fmt)
+        return formatter.format(record)
+
+
+# For StreamHandlers / console
+class VerboseColorfulFormatter(CustomFormatter):
+    def format(self, record):
+        return super().format(record)
+
+
+# For Verbose FileHandlers / files
+class VerboseFormatter(CustomFormatter):
+    fmt = "%(asctime)s::%(name)s::%(levelname)s| %(message)s (%(filename)s:%(lineno)d)"
+
+    def format(self, record):
+        formatter = logging.Formatter(self.fmt)
+        return formatter.format(record)
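The formatter added in `log.py` is what `Laminar._initialize_logger` attaches to a console `StreamHandler`. A short sketch, not part of the package, showing the same wiring on an application logger; the logger name and messages are arbitrary:

```python
import logging

from lmnr.sdk.log import VerboseColorfulFormatter

logger = logging.getLogger("my_app")  # arbitrary name for the example
handler = logging.StreamHandler()  # console handler, as in Laminar._initialize_logger
handler.setFormatter(VerboseColorfulFormatter())
logger.addHandler(handler)
logger.setLevel(logging.INFO)

logger.info("rendered in green with a timestamp and source location")
logger.warning("rendered in yellow using the same format string")
```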
lmnr/sdk/utils.py
CHANGED
@@ -9,8 +9,6 @@ import queue
 import typing
 import uuid
 
-from .providers import Provider, OpenAI
-
 
 def is_method(func: typing.Callable) -> bool:
     # inspect.ismethod is True for bound methods only, but in the decorator,
@@ -24,17 +22,22 @@ def is_method(func: typing.Callable) -> bool:
 
 
 def is_async(func: typing.Callable) -> bool:
-    # `__wrapped__` is set automatically by `functools.wraps` and
+    # `__wrapped__` is set automatically by `functools.wraps` and
+    # `functools.update_wrapper`
     # so we can use it to get the original function
     while hasattr(func, "__wrapped__"):
         func = func.__wrapped__
 
+    if not inspect.isfunction(func):
+        return False
+
     # Check if the function is asynchronous
     if asyncio.iscoroutinefunction(func):
         return True
 
-    # Fallback: check if the function's code object contains 'async'.
-    # cases when
+    # Fallback: check if the function's code object contains 'async'.
+    # This is for cases when a decorator did not properly use
+    # `functools.wraps` or `functools.update_wrapper`
     CO_COROUTINE = inspect.CO_COROUTINE
     return (func.__code__.co_flags & CO_COROUTINE) != 0
 
@@ -85,7 +88,8 @@ def get_input_from_func_args(
     func_args: list[typing.Any] = [],
     func_kwargs: dict[str, typing.Any] = {},
 ) -> dict[str, typing.Any]:
-    # Remove implicitly passed "self" or "cls" argument for
+    # Remove implicitly passed "self" or "cls" argument for
+    # instance or class methods
     res = copy.deepcopy(func_kwargs)
     for i, k in enumerate(inspect.signature(func).parameters.keys()):
         if is_method and k in ["self", "cls"]:
@@ -94,8 +98,3 @@
         if len(func_args) > i:
             res[k] = func_args[i]
     return res
-
-
-PROVIDER_NAME_TO_OBJECT: dict[str, Provider] = {
-    "openai": OpenAI(),
-}
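The `is_async` change above adds an `inspect.isfunction` guard after unwrapping and documents the `co_flags` fallback. A small sketch, not part of the package, illustrating the resulting behavior, assuming lmnr 0.4.0 and its dependencies are installed; `fetch`, `logged`, and `CallableObject` are made-up names:

```python
import asyncio
import functools

from lmnr.sdk.utils import is_async


async def fetch():
    await asyncio.sleep(0)


def logged(func):
    # functools.wraps sets __wrapped__, so is_async can unwrap back to `fetch`
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


class CallableObject:
    def __call__(self):
        return None


print(is_async(fetch))             # True: a plain coroutine function
print(is_async(logged(fetch)))     # True: detected through the __wrapped__ chain
print(is_async(CallableObject()))  # False: the new inspect.isfunction guard returns early
```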
lmnr-0.4.0.dist-info/METADATA
ADDED
@@ -0,0 +1,151 @@
+Metadata-Version: 2.1
+Name: lmnr
+Version: 0.4.0
+Summary: Python SDK for Laminar AI
+License: Apache-2.0
+Author: lmnr.ai
+Requires-Python: >=3.9,<4.0
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: backoff (>=2.2.1,<3.0.0)
+Requires-Dist: pydantic (>=2.7.4,<3.0.0)
+Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
+Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: traceloop-sdk (>=0.29.2,<0.30.0)
+Description-Content-Type: text/markdown
+
+# Laminar Python
+
+OpenTelemetry log sender for [Laminar](https://github.com/lmnr-ai/lmnr) for Python code.
+
+<a href="https://pypi.org/project/lmnr/">  </a>
+
+
+
+
+
+## Quickstart
+```sh
+python3 -m venv .myenv
+source .myenv/bin/activate # or use your favorite env management tool
+
+pip install lmnr
+```
+
+And then in your main Python file
+
+```python
+from lmnr import Laminar as L
+
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
+```
+
+This will automatically instrument most of the LLM, Vector DB, and related
+calls with OpenTelemetry-compatible instrumentation.
+
+We rely on the amazing [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
+by TraceLoop, to achieve that.
+
+### Project API key
+
+Get the key from the settings page of your Laminar project ([Learn more](https://docs.lmnr.ai/api-reference/introduction#authentication)).
+You can either pass it to `.initialize()` or set it to `.env` at the root of your package with the key `LMNR_PROJECT_API_KEY`.
+
+## Instrumentation
+
+In addition to automatic instrumentation, we provide a simple `@observe()` decorator, if you want more fine-grained tracing
+or to trace other functions.
+
+### Example
+
+```python
+import os
+from openai import OpenAI
+
+
+from lmnr import observe, Laminar as L
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
+
+client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
+
+@observe() # annotate all functions you want to trace
+def poem_writer(topic="turbulence"):
+    prompt = f"write a poem about {topic}"
+    response = client.chat.completions.create(
+        model="gpt-4o",
+        messages=[
+            {"role": "system", "content": "You are a helpful assistant."},
+            {"role": "user", "content": prompt},
+        ],
+    )
+    poem = response.choices[0].message.content
+    return poem
+
+print(poem_writer(topic="laminar flow"))
+```
+
+
+## Sending events
+
+You can send events in two ways:
+- `.event(name, value)` – for a pre-defined event with one of the possible values.
+- `.evaluate_event(name, evaluator, data)` – for an event that is evaluated by an evaluator pipeline based on the data.
+
+Note that to run an evaluate event, you need to create an evaluator pipeline and create a target version for it.
+
+Read our [docs](https://docs.lmnr.ai) to learn more about event types and how they are created and evaluated.
+
+### Example
+
+```python
+from lmnr import Laminar as L
+# ...
+poem = response.choices[0].message.content
+
+# this will register True or False value with Laminar
+L.event("topic alignment", topic in poem)
+
+# this will run the pipeline `check_wordy` with `poem` set as the value
+# of `text_input` node, and write the result as an event with name
+# "excessive_wordiness"
+L.evaluate_event("excessive_wordiness", "check_wordy", {"text_input": poem})
+```
+
+## Laminar pipelines as prompt chain managers
+
+You can create Laminar pipelines in the UI and manage chains of LLM calls there.
+
+After you are ready to use your pipeline in your code, deploy it in Laminar by selecting the target version for the pipeline.
+
+Once your pipeline target is set, you can call it from Python in just a few lines.
+
+Example use:
+
+```python
+from lmnr import Laminar as L

+L.initialize('<YOUR_PROJECT_API_KEY>')
+
+result = L.run(
+    pipeline = 'my_pipeline_name',
+    inputs = {'input_node_name': 'some_value'},
+    # all environment variables
+    env = {'OPENAI_API_KEY': 'sk-some-key'},
+)
+```
+
+Resulting in:
+
+```python
+>>> result
+PipelineRunResponse(
+    outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}},
+    # useful to locate your trace
+    run_id='53b012d5-5759-48a6-a9c5-0011610e3669'
+)
+```
+
lmnr-0.4.0.dist-info/RECORD
ADDED
@@ -0,0 +1,12 @@
+lmnr/__init__.py,sha256=LDr-OWinRQz-KjzXAotEzUNoi59QoZi3MMll-vcAE8Y,154
+lmnr/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+lmnr/sdk/decorators.py,sha256=Xs6n0TGX9LZ9i1hE_UZz4LEyd_ZAfpVGfNQh_rKwOuA,2493
+lmnr/sdk/laminar.py,sha256=Ae5w6no2SqM6Zgp9aVJ1kvQUKbgeKX-1fBTovdfElZo,14197
+lmnr/sdk/log.py,sha256=EgAMY77Zn1bv1imCqrmflD3imoAJ2yveOkIcrIP3e98,1170
+lmnr/sdk/types.py,sha256=5-Ft-l35wtmn2xxE8BTqsM3nx1zD799tRv4qiOkED50,2121
+lmnr/sdk/utils.py,sha256=ZsGJ86tq8lIbvOhSb1gJWH5K3GylO_lgX68FN6rG2nM,3358
+lmnr-0.4.0.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
+lmnr-0.4.0.dist-info/METADATA,sha256=cRoKKpLeNNk6E3yxNzLHvGmaStrmCaQXCUAEr-Ix7Dg,4548
+lmnr-0.4.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+lmnr-0.4.0.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
+lmnr-0.4.0.dist-info/RECORD,,
|