lmnr 0.4.8__py3-none-any.whl → 0.4.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lmnr-0.4.8.dist-info/METADATA → lmnr-0.4.10.dist-info/METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: lmnr
- Version: 0.4.8
+ Version: 0.4.10
  Summary: Python SDK for Laminar AI
  License: Apache-2.0
  Author: lmnr.ai
@@ -11,11 +11,11 @@ Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: asyncio (>=3.4.3,<4.0.0)
- Requires-Dist: backoff (>=2.2.1,<3.0.0)
- Requires-Dist: colorama (>=0.4.6,<0.5.0)
- Requires-Dist: deprecated (>=1.2.14,<2.0.0)
- Requires-Dist: jinja2 (>=3.1.2,<4.0.0)
+ Requires-Dist: asyncio (>=3.0,<4.0)
+ Requires-Dist: backoff (>=2.0,<3.0)
+ Requires-Dist: colorama (>=0.4,<0.5)
+ Requires-Dist: deprecated (>=1.0,<2.0)
+ Requires-Dist: jinja2 (>=3.0,<4.0)
  Requires-Dist: opentelemetry-api (>=1.27.0,<2.0.0)
  Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (>=1.26.0,<2.0.0)
  Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.26.0,<2.0.0)
@@ -49,11 +49,11 @@ Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.30.0,<0.31.0)
  Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.30.0,<0.31.0)
  Requires-Dist: opentelemetry-sdk (>=1.27.0,<2.0.0)
  Requires-Dist: opentelemetry-semantic-conventions-ai (==0.4.1)
- Requires-Dist: posthog (>3.0.2,<4)
- Requires-Dist: pydantic (>=2.7.4,<3.0.0)
- Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
- Requires-Dist: requests (>=2.32.3,<3.0.0)
- Requires-Dist: tenacity (>=8.2.3,<9.0.0)
+ Requires-Dist: posthog (>=3.0,<4.0)
+ Requires-Dist: pydantic (>=2.7,<3.0)
+ Requires-Dist: python-dotenv (>=1.0,<2.0)
+ Requires-Dist: requests (>=2.0,<3.0)
+ Requires-Dist: tenacity (>=8.0,<9.0)
  Description-Content-Type: text/markdown

  # Laminar Python
@@ -77,16 +77,19 @@ pip install lmnr
  And the in your main Python file

  ```python
- from lmnr import Laminar as L
+ from lmnr import Laminar as L, Instruments

- L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
  ```

- This will automatically instrument most of the LLM, Vector DB, and related
- calls with OpenTelemetry-compatible instrumentation.
+ If you want to automatically instrument particular LLM, Vector DB, and related
+ calls with OpenTelemetry-compatible instrumentation, then pass the appropriate instruments to `.initialize()`.
+
+ You can pass an empty set as `instruments=set()` to disable any kind of automatic instrumentation.
+ Also if you want to automatically instrument all supported libraries, then pass `instruments=None` or don't pass `instruments` at all.

- We rely on the amazing [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
- by TraceLoop, to achieve that.
+ Our code is based on the [OpenLLMetry](https://github.com/traceloop/openllmetry), open-source package
+ by TraceLoop. Also, we are grateful to Traceloop for implementing autoinstrumentations for many libraries.

  ### Project API key

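For reference, the three initialization modes added in the hunk above can be summarized in one sketch. This is an illustrative snippet, not part of the diff: the API key placeholder and the particular choice of `Instruments.OPENAI` / `Instruments.ANTHROPIC` are taken from the README text above, and only one of the three calls would be used in a real program.

```python
from lmnr import Laminar as L, Instruments

# 1. Selective: instrument only the listed libraries (illustrative choice).
L.initialize(
    project_api_key="<LMNR_PROJECT_API_KEY>",
    instruments={Instruments.OPENAI, Instruments.ANTHROPIC},
)

# 2. Disabled: an empty set turns off all automatic instrumentation.
# L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=set())

# 3. Everything: pass instruments=None, or omit the argument, to instrument
#    all supported libraries.
# L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments=None)
```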
@@ -105,8 +108,8 @@ import os
  from openai import OpenAI


- from lmnr import observe, Laminar as L
- L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
+ from lmnr import observe, Laminar as L, Instruments
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})

  client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])

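The hunk above only touches the import and `initialize` lines of the README's `@observe` example. Below is a rough sketch of how the full example reads with those changes applied; the decorated function body is reconstructed from the surrounding context lines (for instance the `poem_writer(topic="laminar flow")` call visible in the next hunk header), not quoted verbatim from the package.

```python
import os

from openai import OpenAI

from lmnr import observe, Laminar as L, Instruments

L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])


@observe()  # each call to poem_writer is traced as a span in Laminar
def poem_writer(topic="turbulence"):
    prompt = f"write a poem about {topic}"
    # The OpenAI call is auto-instrumented because Instruments.OPENAI is enabled.
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ],
    )
    return response.choices[0].message.content


print(poem_writer(topic="laminar flow"))
```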
@@ -128,40 +131,32 @@ print(poem_writer(topic="laminar flow"))

  ### Manual instrumentation

- Our manual instrumentation is a very thin wrapper around OpenTelemetry's
- `trace.start_span`. Our wrapper sets the span into the active context.
- You don't have to explicitly pass the spans around, it is enough to
- just call `L.start_span`, and OpenTelemetry will handle the context management
+ Also, you can `Laminar.start_as_current_span` if you want to record a chunk of your code.

  ```python
- from lmnr import observe, Laminar as L
- L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>")
+ from lmnr import observe, Laminar as L, Instruments
+ L.initialize(project_api_key="<LMNR_PROJECT_API_KEY>", instruments={Instruments.OPENAI})

  def poem_writer(topic="turbulence"):
-
-     span = L.start_span("poem_writer", topic) # start a span
-
      prompt = f"write a poem about {topic}"
-
-     # OpenAI calls are still automatically instrumented with OpenLLMetry
-     response = client.chat.completions.create(
-         model="gpt-4o",
-         messages=[
-             {"role": "system", "content": "You are a helpful assistant."},
-             {"role": "user", "content": prompt},
-         ],
-     )
-     poem = response.choices[0].message.content
-     # while within the span, you can attach laminar events to it
-     L.event("event_name", "event_value")
-
-     L.set_span_output(span, poem) # set an output
-
-     # IMPORTANT: don't forget to end all the spans (usually in `finally` blocks)
-     # Otherwise, the trace may not be sent/displayed correctly
-     span.end()
-
-     return poem
+     messages = [
+         {"role": "system", "content": "You are a helpful assistant."},
+         {"role": "user", "content": prompt},
+     ]
+
+     with L.start_as_current_span(name="poem_writer", input=messages):
+         # OpenAI calls are still automatically instrumented with OpenLLMetry
+         response = client.chat.completions.create(
+             model="gpt-4o",
+             messages=messages,
+         )
+         poem = response.choices[0].message.content
+         # while within the span, you can attach laminar events to it
+         L.event("event_name", "event_value")
+
+         L.set_span_output(poem) # set an output
+
+         return poem
  ```


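One design note on the hunk above, sketched under the assumption that `start_as_current_span` behaves like a standard OpenTelemetry context manager: the span is ended automatically when the `with` block exits, which is why the old example's reminder to call `span.end()` (usually in a `finally` block) disappears. The helper below is purely illustrative and not from the package.

```python
from lmnr import Laminar as L

def guarded_step(payload: dict) -> str:
    # The span opened here is closed when the `with` block exits,
    # whether the body returns normally or raises.
    with L.start_as_current_span(name="guarded_step", input=payload):
        if "text" not in payload:
            raise ValueError("missing 'text'")  # span still ends and is exported
        result = payload["text"].upper()
        L.set_span_output(result)  # attach the output before leaving the block
        return result
```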
@@ -204,7 +199,7 @@ Example use:
  ```python
  from lmnr import Laminar as L

- L.initialize('<YOUR_PROJECT_API_KEY>')
+ L.initialize('<YOUR_PROJECT_API_KEY>', instruments=set())

  result = l.run(
      pipeline = 'my_pipeline_name',
lmnr-0.4.8.dist-info/RECORD → lmnr-0.4.10.dist-info/RECORD

@@ -1,22 +1,21 @@
- lmnr/__init__.py,sha256=wQwnHl662Xcz7GdSofFsEjmAK0nxioYA2Yq6Q78m4ps,194
+ lmnr/__init__.py,sha256=bA1f7JsEdSdU93HTz3SQLSanq-UgZGvb5I2OE0CWGR8,233
  lmnr/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lmnr/sdk/decorators.py,sha256=0dLKWeKMmG_ryyK285GNsWBZSCWIyRhHgaDHlsJnDeM,2318
+ lmnr/sdk/decorators.py,sha256=W46diLcINe0HAhxktrjbfQnaIfklSb0AydBHHxiko9U,2314
  lmnr/sdk/evaluations.py,sha256=EaRcwbdXxj4w2yzak1xFv-YhDuxRVentQcJ-CypBoH0,6307
- lmnr/sdk/laminar.py,sha256=d5Tt_OyrhtZTfm_ehd3QZev7T6-AKTJU5xAan0-3yPA,20070
+ lmnr/sdk/laminar.py,sha256=M8HdP6ZYJHdngUVrGj4GMZxz_EZyx3woHm-UpfWmIvs,18439
  lmnr/sdk/log.py,sha256=EgAMY77Zn1bv1imCqrmflD3imoAJ2yveOkIcrIP3e98,1170
- lmnr/sdk/types.py,sha256=yTOoVHlg_wpce4Zx1ZSE3y7Qpwh9mcLCPKUi_1nfdk4,4071
+ lmnr/sdk/types.py,sha256=w7BJsoEPHiNps62cQt3Hd6tEZ7ZFCKRTPzcwdD6rNak,4050
  lmnr/sdk/utils.py,sha256=ZsGJ86tq8lIbvOhSb1gJWH5K3GylO_lgX68FN6rG2nM,3358
  lmnr/traceloop_sdk/.flake8,sha256=bCxuDlGx3YQ55QHKPiGJkncHanh9qGjQJUujcFa3lAU,150
  lmnr/traceloop_sdk/.python-version,sha256=9OLQBQVbD4zE4cJsPePhnAfV_snrPSoqEQw-PXgPMOs,6
- lmnr/traceloop_sdk/README.md,sha256=XIfEytq1qYs6Nn3dD0JP002doQI94-AA_mFR3R7_OC8,614
- lmnr/traceloop_sdk/__init__.py,sha256=bK8TNuIwInehlhyn11hK5ACx_k6o2G3Oj4nabsamlqE,4693
+ lmnr/traceloop_sdk/__init__.py,sha256=J-zVw6j0DmceVvJVZXAFcCzN_scz9hB3X17NQgPMgOg,4420
  lmnr/traceloop_sdk/config/__init__.py,sha256=EGN3ixOt_ORbMxqaQdLaC14kmO-gyG4mnGJ2GfN-R-E,364
- lmnr/traceloop_sdk/decorators/__init__.py,sha256=mU2eOvqpkkAd_ve56oQ52c8QOGbjDrjg8IJOTjXJJPg,3359
- lmnr/traceloop_sdk/decorators/base.py,sha256=ZkVXagdHNlrqC0kmhjMx6G9PENRMKp0KD5RNVzaf6l4,8184
+ lmnr/traceloop_sdk/decorators/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ lmnr/traceloop_sdk/decorators/base.py,sha256=wcqXF0iVQgRXMyWTcJ5QvL_6q2y_gttwsX8dllmAtWM,4891
  lmnr/traceloop_sdk/instruments.py,sha256=G5EFAbpc20WD3M6xK6rlbj-Yy_r_f1m3gidY6UXzSRQ,701
  lmnr/traceloop_sdk/metrics/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lmnr/traceloop_sdk/metrics/metrics.py,sha256=AlQ2a2os1WcZbfBd155u_UzBbPrbuPia6O_HbojV9Wc,5055
- lmnr/traceloop_sdk/tests/__init__.py,sha256=cepcpBRcyraU0ce09CYomy_qkHsCJBO9iePorZVkxBk,18
+ lmnr/traceloop_sdk/tests/__init__.py,sha256=RYnG0-8zbXL0-2Ste1mEBf5sN4d_rQjGTCgPBuaZC74,20
  lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_and_external_association_properties.yaml,sha256=26g0wRA0juicHg_XrhcE8H4vhs1lawDs0o0aLFn-I7w,3103
  lmnr/traceloop_sdk/tests/cassettes/test_association_properties/test_langchain_association_properties.yaml,sha256=FNlSWlYCsWc3w7UPZzfGjDnxS3gAOhL-kpsu4BTxsDE,3061
  lmnr/traceloop_sdk/tests/cassettes/test_manual/test_manual_report.yaml,sha256=iq_U_DBKNyM8mhgwGZrqw1OMalIb3g1422cqkvsrPHw,2956
@@ -28,26 +27,26 @@ lmnr/traceloop_sdk/tests/cassettes/test_tasks/test_task_io_serialization_with_la
  lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_aworkflow.yaml,sha256=xbol_wTn-SeEanT4kLY4Y2_HLCTZf0ZHRFkPw402gqI,2944
  lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_simple_workflow.yaml,sha256=ueiJY_6pyKQwbuMpeTAqHui4Ux7kYq_KTBZSob-cAjc,5866
  lmnr/traceloop_sdk/tests/cassettes/test_workflows/test_streaming_workflow.yaml,sha256=flOkiaW0DnQfD4rn_9F9dA0nIGMFjqwR3UzPyaanVjE,7947
- lmnr/traceloop_sdk/tests/conftest.py,sha256=_J8iz6z3CIf8Aj1UTlUtwZMsxUMtiS_-qrIUy4QNNJg,2873
- lmnr/traceloop_sdk/tests/test_association_properties.py,sha256=xHb-bEyF6qB2X3UxuBYmgqSg6qw7xvf6CzzkC2Bdr54,6331
- lmnr/traceloop_sdk/tests/test_manual.py,sha256=EtifedICZPtesGSzIqdAQCOITpydfcwxp-VaMRaajBc,1677
- lmnr/traceloop_sdk/tests/test_nested_tasks.py,sha256=QD5FKOj_ShJcgR09OV6HB97-P6D28CSljLy9UDcEXYI,1301
- lmnr/traceloop_sdk/tests/test_privacy_no_prompts.py,sha256=aJ8qSceWr85pxo6yZCTc0FE-FkiWnDylHUGiKc--jsA,1398
- lmnr/traceloop_sdk/tests/test_sdk_initialization.py,sha256=Dft8iJpjUcQBzF0ZfWokrfZhDBaim6WpFCion2I9FEI,1397
- lmnr/traceloop_sdk/tests/test_tasks.py,sha256=QDmanD1YiAzD3CGcCzCa-OyrOgewJbBJ0gx_EV219S0,858
- lmnr/traceloop_sdk/tests/test_workflows.py,sha256=zEIZjRE0D2uGSXHLSbLoU2JubuOZQqCUKCtWCovnbog,8751
- lmnr/traceloop_sdk/tracing/__init__.py,sha256=4C_umX0uMp--Gh-TPE4oeV3lvNzj2nKOBw516-yRqP0,131
+ lmnr/traceloop_sdk/tests/conftest.py,sha256=XG6U9T6OyQliJUS8sBTXStjIP7Bb6ZChWTt0fulvHuc,3039
+ lmnr/traceloop_sdk/tests/test_association_properties.py,sha256=D_LxhiVLwyXE0eW70uztmvZh3RRbVucM0L2C7O1oaFk,6735
+ lmnr/traceloop_sdk/tests/test_manual.py,sha256=YHmVYfDfFUalAzYQsz50PIzXdj3Jfjcl4erpBU4ae4A,1759
+ lmnr/traceloop_sdk/tests/test_nested_tasks.py,sha256=o7GPLVdgQOI5YH5HMBjInW4sprY4Nlu4SuwjMr1dxWU,1377
+ lmnr/traceloop_sdk/tests/test_privacy_no_prompts.py,sha256=lS8bwg_xOS4gcwwzrrWflMkTSn8eZWyTmAxL3h3hZA0,1478
+ lmnr/traceloop_sdk/tests/test_sdk_initialization.py,sha256=fRaf6lrxFzJIN94P1Tav_z_eywOsF5JXLPWzbJPMMyQ,1477
+ lmnr/traceloop_sdk/tests/test_tasks.py,sha256=xlEx8BKp4yG83SCjK5WkPGfyC33JSrx4h8VyjVwGbgw,906
+ lmnr/traceloop_sdk/tests/test_workflows.py,sha256=RVcfY3WAFIDZC15-aSua21aoQyYeWE7KypDyUsm-2EM,9372
+ lmnr/traceloop_sdk/tracing/__init__.py,sha256=Ckq7zCM26VdJVB5tIZv0GTPyMZKyfso_KWD5yPHaqdo,66
  lmnr/traceloop_sdk/tracing/content_allow_list.py,sha256=3feztm6PBWNelc8pAZUcQyEGyeSpNiVKjOaDk65l2ps,846
  lmnr/traceloop_sdk/tracing/context_manager.py,sha256=csVlB6kDmbgSPsROHwnddvGGblx55v6lJMRj0wsSMQM,304
  lmnr/traceloop_sdk/tracing/manual.py,sha256=RPwEreHHdzmw7g15u4G21GqhHOvRp7d72ylQNLG1jRM,1841
- lmnr/traceloop_sdk/tracing/tracing.py,sha256=iArjof6BC-VU9KXnd5aLXY_3RfWElBBbTsFUKbjubHE,42435
+ lmnr/traceloop_sdk/tracing/tracing.py,sha256=VFrf5D6CC3DquLy_19_5I_L_w1kO2X61KvPW0XD26-k,42347
  lmnr/traceloop_sdk/utils/__init__.py,sha256=pNhf0G3vTd5ccoc03i1MXDbricSaiqCbi1DLWhSekK8,604
  lmnr/traceloop_sdk/utils/in_memory_span_exporter.py,sha256=H_4TRaThMO1H6vUQ0OpQvzJk_fZH0OOsRAM1iZQXsR8,2112
  lmnr/traceloop_sdk/utils/json_encoder.py,sha256=dK6b_axr70IYL7Vv-bu4wntvDDuyntoqsHaddqX7P58,463
  lmnr/traceloop_sdk/utils/package_check.py,sha256=TZSngzJOpFhfUZLXIs38cpMxQiZSmp0D-sCrIyhz7BA,251
  lmnr/traceloop_sdk/version.py,sha256=OlatFEFA4ttqSSIiV8jdE-sq3KG5zu2hnC4B4mzWF3s,23
- lmnr-0.4.8.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
- lmnr-0.4.8.dist-info/METADATA,sha256=biZj3jg_XUVTA4Xgpq66VNfhfvCPi6aDmE2VK4UXXFQ,10792
- lmnr-0.4.8.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- lmnr-0.4.8.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
- lmnr-0.4.8.dist-info/RECORD,,
+ lmnr-0.4.10.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
+ lmnr-0.4.10.dist-info/METADATA,sha256=VPXsfYwAy1uTv_qucCqgAmyGyZMngr0HuysSSdpX8Jw,10999
+ lmnr-0.4.10.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ lmnr-0.4.10.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
+ lmnr-0.4.10.dist-info/RECORD,,
lmnr/traceloop_sdk/README.md (removed)

@@ -1,16 +0,0 @@
- # traceloop-sdk
-
- Traceloop’s Python SDK allows you to easily start monitoring and debugging your LLM execution. Tracing is done in a non-intrusive way, built on top of OpenTelemetry. You can choose to export the traces to Traceloop, or to your existing observability stack.
-
- ```python
- Traceloop.init(app_name="joke_generation_service")
-
- @workflow(name="joke_creation")
- def create_joke():
-     completion = openai.ChatCompletion.create(
-         model="gpt-3.5-turbo",
-         messages=[{"role": "user", "content": "Tell me a joke about opentelemetry"}],
-     )
-
-     return completion.choices[0].message.content
- ```