lmnr 0.3.6__tar.gz → 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lmnr-0.4.0/PKG-INFO ADDED
@@ -0,0 +1,151 @@
1
+ Metadata-Version: 2.1
2
+ Name: lmnr
3
+ Version: 0.4.0
4
+ Summary: Python SDK for Laminar AI
5
+ License: Apache-2.0
6
+ Author: lmnr.ai
7
+ Requires-Python: >=3.9,<4.0
8
+ Classifier: License :: OSI Approved :: Apache Software License
9
+ Classifier: Programming Language :: Python :: 3
10
+ Classifier: Programming Language :: Python :: 3.9
11
+ Classifier: Programming Language :: Python :: 3.10
12
+ Classifier: Programming Language :: Python :: 3.11
13
+ Classifier: Programming Language :: Python :: 3.12
14
+ Requires-Dist: backoff (>=2.2.1,<3.0.0)
15
+ Requires-Dist: pydantic (>=2.7.4,<3.0.0)
16
+ Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
17
+ Requires-Dist: requests (>=2.32.3,<3.0.0)
18
+ Requires-Dist: traceloop-sdk (>=0.29.2,<0.30.0)
19
+ Description-Content-Type: text/markdown
20
+
21
+ # Laminar Python
22
+
23
+ OpenTelemetry log sender for [Laminar](https://github.com/lmnr-ai/lmnr) for Python code.
24
+
25
+ <a href="https://pypi.org/project/lmnr/"> ![PyPI - Version](https://img.shields.io/pypi/v/lmnr?label=lmnr&logo=pypi&logoColor=3775A9) </a>
26
+ ![PyPI - Downloads](https://img.shields.io/pypi/dm/lmnr)
27
+ ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/lmnr)
28
+
29
+
30
+
31
+ ## Quickstart
32
+ ```sh
33
+ python3 -m venv .myenv
34
+ source .myenv/bin/activate # or use your favorite env management tool
35
+
36
+ pip install lmnr
37
+ ```
38
+
39
+ And then, in your main Python file:
40
+
41
+ ```python
42
+ from lmnr import Laminar as L
43
+
44
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
45
+ ```
46
+
47
+ This will automatically instrument most of the LLM, Vector DB, and related
48
+ calls with OpenTelemetry-compatible instrumentation.
49
+
50
+ We rely on the amazing [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
51
+ by TraceLoop, to achieve that.
52
+
53
+ ### Project API key
54
+
55
+ Get the key from the settings page of your Laminar project ([Learn more](https://docs.lmnr.ai/api-reference/introduction#authentication)).
56
+ You can either pass it to `.initialize()` or set it to `.env` at the root of your package with the key `LMNR_PROJECT_API_KEY`.
57
+
58
+ ## Instrumentation
59
+
60
+ In addition to automatic instrumentation, we provide a simple `@observe()` decorator, if you want more fine-grained tracing
61
+ or to trace other functions.
62
+
63
+ ### Example
64
+
65
+ ```python
66
+ import os
67
+ from openai import OpenAI
68
+
69
+
70
+ from lmnr import observe, Laminar as L
71
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
72
+
73
+ client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
74
+
75
+ @observe() # annotate all functions you want to trace
76
+ def poem_writer(topic="turbulence"):
77
+ prompt = f"write a poem about {topic}"
78
+ response = client.chat.completions.create(
79
+ model="gpt-4o",
80
+ messages=[
81
+ {"role": "system", "content": "You are a helpful assistant."},
82
+ {"role": "user", "content": prompt},
83
+ ],
84
+ )
85
+ poem = response.choices[0].message.content
86
+ return poem
87
+
88
+ print(poem_writer(topic="laminar flow"))
89
+ ```
90
+
91
+
92
+ ## Sending events
93
+
94
+ You can send events in two ways:
95
+ - `.event(name, value)` – for a pre-defined event with one of possible values.
96
+ - `.evaluate_event(name, evaluator, data)` – for an event that is evaluated by evaluator pipeline based on the data.
97
+
98
+ Note that to run an evaluate event, you need to create an evaluator pipeline and create a target version for it.
99
+
100
+ Read our [docs](https://docs.lmnr.ai) to learn more about event types and how they are created and evaluated.
101
+
102
+ ### Example
103
+
104
+ ```python
105
+ from lmnr import Laminar as L
106
+ # ...
107
+ poem = response.choices[0].message.content
108
+
109
+ # this will register True or False value with Laminar
110
+ L.event("topic alignment", topic in poem)
111
+
112
+ # this will run the pipeline `check_wordy` with `poem` set as the value
113
+ # of `text_input` node, and write the result as an event with name
114
+ # "excessive_wordiness"
115
+ L.evaluate_event("excessive_wordiness", "check_wordy", {"text_input": poem})
116
+ ```
117
+
118
+ ## Laminar pipelines as prompt chain managers
119
+
120
+ You can create Laminar pipelines in the UI and manage chains of LLM calls there.
121
+
122
+ After you are ready to use your pipeline in your code, deploy it in Laminar by selecting the target version for the pipeline.
123
+
124
+ Once your pipeline target is set, you can call it from Python in just a few lines.
125
+
126
+ Example use:
127
+
128
+ ```python
129
+ from lmnr import Laminar as L
130
+
131
+ L.initialize('<YOUR_PROJECT_API_KEY>')
132
+
133
+ result = L.run(
134
+ pipeline = 'my_pipeline_name',
135
+ inputs = {'input_node_name': 'some_value'},
136
+ # all environment variables
137
+ env = {'OPENAI_API_KEY': 'sk-some-key'},
138
+ )
139
+ ```
140
+
141
+ Resulting in:
142
+
143
+ ```python
144
+ >>> result
145
+ PipelineRunResponse(
146
+ outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}},
147
+ # useful to locate your trace
148
+ run_id='53b012d5-5759-48a6-a9c5-0011610e3669'
149
+ )
150
+ ```
151
+
lmnr-0.4.0/README.md ADDED
@@ -0,0 +1,130 @@
1
+ # Laminar Python
2
+
3
+ OpenTelemetry log sender for [Laminar](https://github.com/lmnr-ai/lmnr) for Python code.
4
+
5
+ <a href="https://pypi.org/project/lmnr/"> ![PyPI - Version](https://img.shields.io/pypi/v/lmnr?label=lmnr&logo=pypi&logoColor=3775A9) </a>
6
+ ![PyPI - Downloads](https://img.shields.io/pypi/dm/lmnr)
7
+ ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/lmnr)
8
+
9
+
10
+
11
+ ## Quickstart
12
+ ```sh
13
+ python3 -m venv .myenv
14
+ source .myenv/bin/activate # or use your favorite env management tool
15
+
16
+ pip install lmnr
17
+ ```
18
+
19
+ And then, in your main Python file:
20
+
21
+ ```python
22
+ from lmnr import Laminar as L
23
+
24
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
25
+ ```
26
+
27
+ This will automatically instrument most of the LLM, Vector DB, and related
28
+ calls with OpenTelemetry-compatible instrumentation.
29
+
30
+ We rely on the amazing [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
31
+ by TraceLoop, to achieve that.
32
+
33
+ ### Project API key
34
+
35
+ Get the key from the settings page of your Laminar project ([Learn more](https://docs.lmnr.ai/api-reference/introduction#authentication)).
36
+ You can either pass it to `.initialize()` or set it to `.env` at the root of your package with the key `LMNR_PROJECT_API_KEY`.
37
+
38
+ ## Instrumentation
39
+
40
+ In addition to automatic instrumentation, we provide a simple `@observe()` decorator, if you want more fine-grained tracing
41
+ or to trace other functions.
42
+
43
+ ### Example
44
+
45
+ ```python
46
+ import os
47
+ from openai import OpenAI
48
+
49
+
50
+ from lmnr import observe, Laminar as L
51
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
52
+
53
+ client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
54
+
55
+ @observe() # annotate all functions you want to trace
56
+ def poem_writer(topic="turbulence"):
57
+ prompt = f"write a poem about {topic}"
58
+ response = client.chat.completions.create(
59
+ model="gpt-4o",
60
+ messages=[
61
+ {"role": "system", "content": "You are a helpful assistant."},
62
+ {"role": "user", "content": prompt},
63
+ ],
64
+ )
65
+ poem = response.choices[0].message.content
66
+ return poem
67
+
68
+ print(poem_writer(topic="laminar flow"))
69
+ ```
70
+
71
+
72
+ ## Sending events
73
+
74
+ You can send events in two ways:
75
+ - `.event(name, value)` – for a pre-defined event with one of possible values.
76
+ - `.evaluate_event(name, evaluator, data)` – for an event that is evaluated by evaluator pipeline based on the data.
77
+
78
+ Note that to run an evaluate event, you need to create an evaluator pipeline and create a target version for it.
79
+
80
+ Read our [docs](https://docs.lmnr.ai) to learn more about event types and how they are created and evaluated.
81
+
82
+ ### Example
83
+
84
+ ```python
85
+ from lmnr import Laminar as L
86
+ # ...
87
+ poem = response.choices[0].message.content
88
+
89
+ # this will register True or False value with Laminar
90
+ L.event("topic alignment", topic in poem)
91
+
92
+ # this will run the pipeline `check_wordy` with `poem` set as the value
93
+ # of `text_input` node, and write the result as an event with name
94
+ # "excessive_wordiness"
95
+ L.evaluate_event("excessive_wordiness", "check_wordy", {"text_input": poem})
96
+ ```
97
+
98
+ ## Laminar pipelines as prompt chain managers
99
+
100
+ You can create Laminar pipelines in the UI and manage chains of LLM calls there.
101
+
102
+ After you are ready to use your pipeline in your code, deploy it in Laminar by selecting the target version for the pipeline.
103
+
104
+ Once your pipeline target is set, you can call it from Python in just a few lines.
105
+
106
+ Example use:
107
+
108
+ ```python
109
+ from lmnr import Laminar as L
110
+
111
+ L.initialize('<YOUR_PROJECT_API_KEY>')
112
+
113
+ result = L.run(
114
+ pipeline = 'my_pipeline_name',
115
+ inputs = {'input_node_name': 'some_value'},
116
+ # all environment variables
117
+ env = {'OPENAI_API_KEY': 'sk-some-key'},
118
+ )
119
+ ```
120
+
121
+ Resulting in:
122
+
123
+ ```python
124
+ >>> result
125
+ PipelineRunResponse(
126
+ outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}},
127
+ # useful to locate your trace
128
+ run_id='53b012d5-5759-48a6-a9c5-0011610e3669'
129
+ )
130
+ ```
@@ -1,6 +1,6 @@
1
1
  [project]
2
2
  name = "lmnr"
3
- version = "0.3.6"
3
+ version = "0.4.0"
4
4
  description = "Python SDK for Laminar AI"
5
5
  authors = [
6
6
  { name = "lmnr.ai", email = "founders@lmnr.ai" }
@@ -11,7 +11,7 @@ license = "Apache-2.0"
11
11
 
12
12
  [tool.poetry]
13
13
  name = "lmnr"
14
- version = "0.3.6"
14
+ version = "0.4.0"
15
15
  description = "Python SDK for Laminar AI"
16
16
  authors = ["lmnr.ai"]
17
17
  readme = "README.md"
@@ -22,9 +22,8 @@ python = "^3.9"
22
22
  pydantic = "^2.7.4"
23
23
  requests = "^2.32.3"
24
24
  python-dotenv = "^1.0.1"
25
-
26
- openai = "^1.41.1"
27
25
  backoff = "^2.2.1"
26
+ traceloop-sdk = "^0.29.2"
28
27
 
29
28
  [tool.poetry.group.dev.dependencies]
30
29
  black = "^24.8.0"
@@ -0,0 +1,3 @@
1
+ from .sdk.laminar import Laminar
2
+ from .sdk.types import ChatMessage, PipelineRunError, PipelineRunResponse, NodeInput
3
+ from .sdk.decorators import observe
@@ -0,0 +1,72 @@
1
+ from traceloop.sdk.decorators.base import (
2
+ entity_method,
3
+ aentity_method,
4
+ )
5
+ from opentelemetry.trace import INVALID_SPAN, get_current_span
6
+ from traceloop.sdk import Traceloop
7
+
8
+ from typing import Callable, Optional, ParamSpec, TypeVar, cast
9
+
10
+ from .laminar import Laminar as L
11
+ from .utils import is_async
12
+
13
+ P = ParamSpec("P")
14
+ R = TypeVar("R")
15
+
16
+
17
def observe(
    *,
    name: Optional[str] = None,
    user_id: Optional[str] = None,
    session_id: Optional[str] = None,
) -> Callable[[Callable[P, R]], Callable[P, R]]:
    """The main decorator entrypoint for Laminar. This is used to wrap
    functions and methods to create spans.

    Args:
        name (Optional[str], optional): Name of the span. Function
                        name is used if not specified.
                        Defaults to None.
        user_id (Optional[str], optional): User ID to associate
                        with the span and the following context.
                        Defaults to None.
        session_id (Optional[str], optional): Session ID to associate with the
                        span and the following context. Defaults to None.

    Raises:
        Exception: if Laminar has not been initialized when the decorator is
            applied; exceptions raised by the wrapped function propagate
            unchanged.

    Returns:
        Callable[[Callable[P, R]], Callable[P, R]]: the actual decorator that
            wraps `func` in a (sync or async) tracing entity method.
    """

    def decorator(func: Callable[P, R]) -> Callable[P, R]:
        if not L.is_initialized():
            raise Exception(
                "Laminar is not initialized. Please "
                + "call Laminar.initialize() first."
            )

        # Build the association properties once; reused both for the current
        # span (if any) and for the global Traceloop association context.
        association_properties = {}
        if session_id is not None:
            association_properties["session_id"] = session_id
        if user_id is not None:
            association_properties["user_id"] = user_id

        # NOTE(review): this block runs at decoration (typically import)
        # time, not at call time, so `get_current_span()` is almost always
        # INVALID_SPAN here — confirm this is the intended timing.
        current_span = get_current_span()
        if current_span != INVALID_SPAN:
            for key, value in association_properties.items():
                current_span.set_attribute(
                    f"traceloop.association.properties.{key}", value
                )
        if association_properties:
            # Only touch the global context when an id was actually given,
            # so decorating without ids does not overwrite prior properties
            # with an empty dict.
            Traceloop.set_association_properties(association_properties)

        # Dispatch to the async-aware wrapper when the target is a coroutine
        # function; both preserve the wrapped function's call signature.
        return (
            aentity_method(name=name)(func)
            if is_async(func)
            else entity_method(name=name)(func)
        )

    # The previous `cast(Callable[P, R], decorator)` mislabeled the
    # decorator's type (it is a function-to-function transformer, not
    # Callable[P, R]); the return annotation already describes it correctly,
    # so no cast is needed.
    return decorator