lmnr 0.4.9__tar.gz → 0.4.10__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. {lmnr-0.4.9 → lmnr-0.4.10}/PKG-INFO +29 -36
  2. {lmnr-0.4.9 → lmnr-0.4.10}/README.md +28 -35
  3. {lmnr-0.4.9 → lmnr-0.4.10}/pyproject.toml +2 -2
  4. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/__init__.py +1 -0
  5. {lmnr-0.4.9 → lmnr-0.4.10}/LICENSE +0 -0
  6. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/__init__.py +0 -0
  7. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/decorators.py +0 -0
  8. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/evaluations.py +0 -0
  9. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/laminar.py +0 -0
  10. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/log.py +0 -0
  11. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/types.py +0 -0
  12. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/sdk/utils.py +0 -0
  13. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/.flake8 +0 -0
  14. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/.python-version +0 -0
  15. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/__init__.py +0 -0
  16. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/config/__init__.py +0 -0
  17. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/decorators/__init__.py +0 -0
  18. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/decorators/base.py +0 -0
  19. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/instruments.py +0 -0
  20. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/metrics/__init__.py +0 -0
  21. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/metrics/metrics.py +0 -0
  22. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/__init__.py +0 -0
  23. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_and_external_association_properties.yaml +0 -0
  24. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_association_properties.yaml +0 -0
  25. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_manual/test_manual_report.yaml +0 -0
  26. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_manual/test_resource_attributes.yaml +0 -0
  27. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_privacy_no_prompts/test_simple_workflow.yaml +0 -0
  28. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_prompt_management/test_prompt_management.yaml +0 -0
  29. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_sdk_initialization/test_resource_attributes.yaml +0 -0
  30. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_tasks/test_task_io_serialization_with_langchain.yaml +0 -0
  31. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_aworkflow.yaml +0 -0
  32. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_workflow.yaml +0 -0
  33. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_streaming_workflow.yaml +0 -0
  34. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/conftest.py +0 -0
  35. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_association_properties.py +0 -0
  36. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_manual.py +0 -0
  37. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_nested_tasks.py +0 -0
  38. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_privacy_no_prompts.py +0 -0
  39. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_sdk_initialization.py +0 -0
  40. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_tasks.py +0 -0
  41. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tests/test_workflows.py +0 -0
  42. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/__init__.py +0 -0
  43. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/content_allow_list.py +0 -0
  44. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/context_manager.py +0 -0
  45. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/manual.py +0 -0
  46. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/tracing/tracing.py +0 -0
  47. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/__init__.py +0 -0
  48. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/in_memory_span_exporter.py +0 -0
  49. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/json_encoder.py +0 -0
  50. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/utils/package_check.py +0 -0
  51. {lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/traceloop_sdk/version.py +0 -0
{lmnr-0.4.9 → lmnr-0.4.10}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lmnr
-Version: 0.4.9
+Version: 0.4.10
 Summary: Python SDK for Laminar AI
 License: Apache-2.0
 Author: lmnr.ai
@@ -77,18 +77,19 @@ pip install lmnr
 And then in your main Python file
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar as L, Instruments
 
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
 ```
 
 If you want to automatically instrument particular LLM, Vector DB, and related
 calls with OpenTelemetry-compatible instrumentation, then pass the appropriate instruments to `.initialize()`.
 
+You can pass an empty set (`instruments=set()`) to disable all automatic instrumentation.
 Also if you want to automatically instrument all supported libraries, then pass `instruments=None` or don't pass `instruments` at all.
 
-We rely on the amazing [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
-by TraceLoop, to achieve that.
+Our code is based on [OpenLLMetry](https://github.com/traceloop/openllmetry), an open-source package
+by TraceLoop. We are also grateful to Traceloop for implementing auto-instrumentation for many libraries.
 
 ### Project API key
 
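Taken together, the changed README text describes three `instruments` modes: a specific set, an empty set, or `None`/omitted. A minimal sketch of those options in one place, using only the `Instruments` members that appear in this diff (others may exist but are not assumed here):

```python
from lmnr import Laminar as L, Instruments

# Instrument only the listed libraries.
L.initialize(
    project_api_key="<LMNR_PROJECT_API_KEY>",
    instruments={Instruments.OPENAI, Instruments.ANTHROPIC},
)

# Or, instead of the call above:
# L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())  # disable auto-instrumentation
# L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")                     # instrument all supported libraries
```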
@@ -107,8 +108,8 @@ import os
 from openai import OpenAI
 
 
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
 
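This hunk only touches the import and `initialize` call of the README's `@observe` example; the decorated function itself is unchanged context outside the hunk. A sketch of how that example presumably reads after the change, reconstructed from the hunk headers (`import os`, `print(poem_writer(topic="laminar flow"))`); the `@observe()` call form and the function body are assumptions, not part of this diff:

```python
import os

from openai import OpenAI
from lmnr import observe, Laminar as L, Instruments

L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])


@observe()  # assumed decorator call form; records poem_writer as a span
def poem_writer(topic="turbulence"):
    prompt = f"write a poem about {topic}"
    # OpenAI calls are auto-instrumented because Instruments.OPENAI was enabled above
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content


print(poem_writer(topic="laminar flow"))
```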
@@ -130,40 +131,32 @@ print(poem_writer(topic="laminar flow"))
 
 ### Manual instrumentation
 
-Our manual instrumentation is a very thin wrapper around OpenTelemetry's
-`trace.start_span`. Our wrapper sets the span into the active context.
-You don't have to explicitly pass the spans around, it is enough to
-just call `L.start_span`, and OpenTelemetry will handle the context management
+Also, you can use `Laminar.start_as_current_span` if you want to record a chunk of your code.
 
 ```python
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 def poem_writer(topic="turbulence"):
-
-    span = L.start_span("poem_writer", topic) # start a span
-
     prompt = f"write a poem about {topic}"
-
-    # OpenAI calls are still automatically instrumented with OpenLLMetry
-    response = client.chat.completions.create(
-        model="gpt-4o",
-        messages=[
-            {"role": "system", "content": "You are a helpful assistant."},
-            {"role": "user", "content": prompt},
-        ],
-    )
-    poem = response.choices[0].message.content
-    # while within the span, you can attach laminar events to it
-    L.event("event_name", "event_value")
-
-    L.set_span_output(poem) # set an output
-
-    # IMPORTANT: don't forget to end all the spans (usually in `finally` blocks)
-    # Otherwise, the trace may not be sent/displayed correctly
-    span.end()
-
-    return poem
+    messages = [
+        {"role": "system", "content": "You are a helpful assistant."},
+        {"role": "user", "content": prompt},
+    ]
+
+    with L.start_as_current_span(name="poem_writer", input=messages):
+        # OpenAI calls are still automatically instrumented with OpenLLMetry
+        response = client.chat.completions.create(
+            model="gpt-4o",
+            messages=messages,
+        )
+        poem = response.choices[0].message.content
+        # while within the span, you can attach laminar events to it
+        L.event("event_name", "event_value")
+
+        L.set_span_output(poem) # set an output
+
+        return poem
 ```
 
 
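The rewritten example drops the old `span.end()` / `finally` warning because `start_as_current_span` is now used as a context manager, so the span is closed when the `with` block exits, including on exceptions. A minimal sketch of that pattern; the function name is hypothetical, and it is assumed (based on the messages example above) that `input` accepts any serializable value:

```python
from lmnr import Laminar as L

L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())


def summarize(text: str) -> str:
    # The span opened here is ended automatically when the block exits,
    # even if the code inside raises, so no explicit span.end() is needed.
    with L.start_as_current_span(name="summarize", input=text):
        result = text[:100]        # placeholder "work" recorded under the span
        L.set_span_output(result)  # attach the output before leaving the block
        return result
```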
{lmnr-0.4.9 → lmnr-0.4.10}/README.md
@@ -19,18 +19,19 @@ pip install lmnr
 And then in your main Python file
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar as L, Instruments
 
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
 ```
 
 If you want to automatically instrument particular LLM, Vector DB, and related
 calls with OpenTelemetry-compatible instrumentation, then pass the appropriate instruments to `.initialize()`.
 
+You can pass an empty set (`instruments=set()`) to disable all automatic instrumentation.
 Also if you want to automatically instrument all supported libraries, then pass `instruments=None` or don't pass `instruments` at all.
 
-We rely on the amazing [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
-by TraceLoop, to achieve that.
+Our code is based on [OpenLLMetry](https://github.com/traceloop/openllmetry), an open-source package
+by TraceLoop. We are also grateful to Traceloop for implementing auto-instrumentation for many libraries.
 
 ### Project API key
 
@@ -49,8 +50,8 @@ import os
 from openai import OpenAI
 
 
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
 
@@ -72,40 +73,32 @@ print(poem_writer(topic="laminar flow"))
 
 ### Manual instrumentation
 
-Our manual instrumentation is a very thin wrapper around OpenTelemetry's
-`trace.start_span`. Our wrapper sets the span into the active context.
-You don't have to explicitly pass the spans around, it is enough to
-just call `L.start_span`, and OpenTelemetry will handle the context management
+Also, you can use `Laminar.start_as_current_span` if you want to record a chunk of your code.
 
 ```python
-from lmnr import observe, Laminar as L
-L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())
+from lmnr import observe, Laminar as L, Instruments
+L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})
 
 def poem_writer(topic="turbulence"):
-
-    span = L.start_span("poem_writer", topic) # start a span
-
     prompt = f"write a poem about {topic}"
-
-    # OpenAI calls are still automatically instrumented with OpenLLMetry
-    response = client.chat.completions.create(
-        model="gpt-4o",
-        messages=[
-            {"role": "system", "content": "You are a helpful assistant."},
-            {"role": "user", "content": prompt},
-        ],
-    )
-    poem = response.choices[0].message.content
-    # while within the span, you can attach laminar events to it
-    L.event("event_name", "event_value")
-
-    L.set_span_output(poem) # set an output
-
-    # IMPORTANT: don't forget to end all the spans (usually in `finally` blocks)
-    # Otherwise, the trace may not be sent/displayed correctly
-    span.end()
-
-    return poem
+    messages = [
+        {"role": "system", "content": "You are a helpful assistant."},
+        {"role": "user", "content": prompt},
+    ]
+
+    with L.start_as_current_span(name="poem_writer", input=messages):
+        # OpenAI calls are still automatically instrumented with OpenLLMetry
+        response = client.chat.completions.create(
+            model="gpt-4o",
+            messages=messages,
+        )
+        poem = response.choices[0].message.content
+        # while within the span, you can attach laminar events to it
+        L.event("event_name", "event_value")
+
+        L.set_span_output(poem) # set an output
+
+        return poem
 ```
 
 
{lmnr-0.4.9 → lmnr-0.4.10}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "lmnr"
-version = "0.4.9"
+version = "0.4.10"
 description = "Python SDK for Laminar AI"
 authors = [
     { name = "lmnr.ai", email = "founders@lmnr.ai" }
@@ -11,7 +11,7 @@ license = "Apache-2.0"
 
 [tool.poetry]
 name = "lmnr"
-version = "0.4.9"
+version = "0.4.10"
 description = "Python SDK for Laminar AI"
 authors = ["lmnr.ai"]
 readme = "README.md"
{lmnr-0.4.9 → lmnr-0.4.10}/src/lmnr/__init__.py
@@ -2,3 +2,4 @@ from .sdk.evaluations import Evaluation
 from .sdk.laminar import Laminar
 from .sdk.types import ChatMessage, PipelineRunError, PipelineRunResponse, NodeInput
 from .sdk.decorators import observe
+from .traceloop_sdk import Instruments
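This one-line change re-exports `Instruments` at the package root, which is what makes the updated README's `from lmnr import ... Instruments` imports work. A minimal usage sketch of the new export:

```python
# Instruments is importable directly from the package root as of 0.4.10.
from lmnr import Laminar, Instruments

Laminar.initialize(
    project_api_key="<LMNR_PROJECT_API_KEY>",
    instruments={Instruments.OPENAI},
)
```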