lmnr 0.4.9__tar.gz → 0.4.10__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lmnr-0.4.9 → lmnr-0.4.10}/PKG-INFO +29 -36
- {lmnr-0.4.9 → lmnr-0.4.10}/README.md +28 -35
- {lmnr-0.4.9 → lmnr-0.4.10}/pyproject.toml +2 -2
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/__init__.py +1 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/LICENSE +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/decorators.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/evaluations.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/laminar.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/log.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/types.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/utils.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/.flake8 +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/.python-version +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/config/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/decorators/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/decorators/base.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/instruments.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/metrics/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/metrics/metrics.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_and_external_association_properties.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_association_properties.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_manual/test_manual_report.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_manual/test_resource_attributes.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_privacy_no_prompts/test_simple_workflow.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_prompt_management/test_prompt_management.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_sdk_initialization/test_resource_attributes.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_tasks/test_task_io_serialization_with_langchain.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_aworkflow.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_workflow.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_streaming_workflow.yaml +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/conftest.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_association_properties.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_manual.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_nested_tasks.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_privacy_no_prompts.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_sdk_initialization.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_tasks.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_workflows.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/content_allow_list.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/context_manager.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/manual.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/tracing.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/__init__.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/in_memory_span_exporter.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/json_encoder.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/package_check.py +0 -0
- {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/version.py +0 -0
{lmnr-0.4.9 → lmnr-0.4.10}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lmnr
-Version: 0.4.9
+Version: 0.4.10
 Summary: Python SDK for Laminar AI
 License: Apache-2.0
 Author: lmnr.ai
@@ -77,18 +77,19 @@ pip install lmnr
 And the in your main Python file
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar as L, Instruments
 
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
 ```
 
 If you want to automatically instrument particular LLM, Vector DB, and related
 calls with OpenTelemetry-compatible instrumentation, then pass the appropriate instruments to `.initialize()`.
 
+You can pass an empty set as `instruments=set()` to disable any kind of automatic instrumentation.
 Also if you want to automatically instrument all supported libraries, then pass `instruments=None` or don't pass `instruments` at all.
 
-
-by TraceLoop, to
+Our code is based on the [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
+by TraceLoop. Also, we are grateful to Traceloop for implementing autoinstrumentations for many libraries.
 
 ### Project API key
 
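To make the behaviour documented in this hunk concrete, here is a minimal sketch of the three ways `instruments` can now be passed to `Laminar.initialize`, based only on the usage shown in the new README lines above (the API key placeholder is illustrative, and only one of these calls would be made in a real program):

```python
from lmnr import Laminar as L, Instruments

# Instrument only the libraries you name, as in the README example above.
L.initialize(
    project_api_key="<LMNR_PROJECT_API_KEY>",
    instruments={Instruments.OPENAI, Instruments.ANTHROPIC},
)

# New in this release: an empty set disables all automatic instrumentation.
# L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())

# Passing instruments=None, or omitting it, instruments all supported libraries.
# L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
```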
@@ -107,8 +108,8 @@ import os
 from openai import OpenAI
 
 
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
 
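This hunk only shows the changed import and `initialize` call; the `@observe`-decorated function it belongs to is elided from the view. The sketch below is a hedged reconstruction of such an example from the surrounding context lines (`client`, `poem_writer`); the function body and the `@observe()` call are assumptions, not lines taken from the package:

```python
import os

from openai import OpenAI

from lmnr import observe, Laminar as L, Instruments

L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])

@observe()  # assumed decorator usage; it traces the whole function call
def poem_writer(topic="turbulence"):
    # Body assumed for illustration only.
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[{"role": "user", "content": f"write a poem about {topic}"}],
    )
    return response.choices[0].message.content

print(poem_writer(topic="laminar flow"))
```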
@@ -130,40 +131,32 @@ print(poem_writer(topic="laminar flow"))
 
 ### Manual instrumentation
 
-
-`trace.start_span`. Our wrapper sets the span into the active context.
-You don't have to explicitly pass the spans around, it is enough to
-just call `L.start_span`, and OpenTelemetry will handle the context management
+Also, you can `Laminar.start_as_current_span` if you want to record a chunk of your code.
 
 ```python
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 def poem_writer(topic="turbulence"):
-
-    span = L.start_span("poem_writer", topic) # start a span
-
     prompt = f"write a poem about {topic}"
-… (the remaining 20 removed lines of the old start_span example are not shown in this view)
+    messages = [
+        {"role": "system", "content": "You are a helpful assistant."},
+        {"role": "user", "content": prompt},
+    ]
+
+    with L.start_as_current_span(name="poem_writer", input=messages):
+        # OpenAI calls are still automatically instrumented with OpenLLMetry
+        response = client.chat.completions.create(
+            model="gpt-4o",
+            messages=messages,
+        )
+        poem = response.choices[0].message.content
+        # while within the span, you can attach laminar events to it
+        L.event("event_name", "event_value")
+
+        L.set_span_output(poem) # set an output
+
+    return poem
 ```
 
 
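The new example above wraps an LLM call, but the added prose says `start_as_current_span` can record any chunk of code. Below is a minimal sketch of that more general use, assuming the context manager, `event`, and `set_span_output` behave exactly as in the README example (the function and its payload are hypothetical):

```python
from lmnr import Laminar as L

def chunk_documents(raw_text: str) -> list[str]:
    # Hypothetical non-LLM work recorded as a span, mirroring the README pattern.
    with L.start_as_current_span(name="chunk_documents", input=raw_text):
        chunks = [raw_text[i : i + 512] for i in range(0, len(raw_text), 512)]
        L.event("chunking_finished", "done")  # attach an event while inside the span
        L.set_span_output(chunks)  # record the result as the span output
    return chunks
```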
{lmnr-0.4.9 → lmnr-0.4.10}/README.md

@@ -19,18 +19,19 @@ pip install lmnr
 And the in your main Python file
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar as L, Instruments
 
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
 ```
 
 If you want to automatically instrument particular LLM, Vector DB, and related
 calls with OpenTelemetry-compatible instrumentation, then pass the appropriate instruments to `.initialize()`.
 
+You can pass an empty set as `instruments=set()` to disable any kind of automatic instrumentation.
 Also if you want to automatically instrument all supported libraries, then pass `instruments=None` or don't pass `instruments` at all.
 
-
-by TraceLoop, to
+Our code is based on the [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
+by TraceLoop. Also, we are grateful to Traceloop for implementing autoinstrumentations for many libraries.
 
 ### Project API key
 
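One case the added README line calls out is turning automatic instrumentation off entirely; a minimal sketch of that configuration, e.g. for a test run where no auto-instrumentation is wanted (the setup around it is assumed):

```python
from lmnr import Laminar as L

# Per the added README line, an empty set disables every automatic instrumentation;
# spans created manually or via @observe would still be recorded.
L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
```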
@@ -49,8 +50,8 @@ import os
 from openai import OpenAI
 
 
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
 
@@ -72,40 +73,32 @@ print(poem_writer(topic="laminar flow"))
 
 ### Manual instrumentation
 
-
-`trace.start_span`. Our wrapper sets the span into the active context.
-You don't have to explicitly pass the spans around, it is enough to
-just call `L.start_span`, and OpenTelemetry will handle the context management
+Also, you can `Laminar.start_as_current_span` if you want to record a chunk of your code.
 
 ```python
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 def poem_writer(topic="turbulence"):
-
-    span = L.start_span("poem_writer", topic) # start a span
-
     prompt = f"write a poem about {topic}"
-… (the remaining 20 removed lines of the old start_span example are not shown in this view)
+    messages = [
+        {"role": "system", "content": "You are a helpful assistant."},
+        {"role": "user", "content": prompt},
+    ]
+
+    with L.start_as_current_span(name="poem_writer", input=messages):
+        # OpenAI calls are still automatically instrumented with OpenLLMetry
+        response = client.chat.completions.create(
+            model="gpt-4o",
+            messages=messages,
+        )
+        poem = response.choices[0].message.content
+        # while within the span, you can attach laminar events to it
+        L.event("event_name", "event_value")
+
+        L.set_span_output(poem) # set an output
+
+    return poem
 ```
 
 
{lmnr-0.4.9 → lmnr-0.4.10}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "lmnr"
-version = "0.4.9"
+version = "0.4.10"
 description = "Python SDK for Laminar AI"
 authors = [
     { name = "lmnr.ai", email = "founders@lmnr.ai" }
@@ -11,7 +11,7 @@ license = "Apache-2.0"
 
 [tool.poetry]
 name = "lmnr"
-version = "0.4.9"
+version = "0.4.10"
 description = "Python SDK for Laminar AI"
 authors = ["lmnr.ai"]
 readme = "README.md"