lmnr 0.4.37__tar.gz → 0.4.39__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. {lmnr-0.4.37 → lmnr-0.4.39}/PKG-INFO +74 -53
  2. {lmnr-0.4.37 → lmnr-0.4.39}/README.md +22 -27
  3. lmnr-0.4.39/pyproject.toml +149 -0
  4. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/decorators.py +0 -8
  5. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/laminar.py +2 -15
  6. lmnr-0.4.37/pyproject.toml +0 -80
  7. {lmnr-0.4.37 → lmnr-0.4.39}/LICENSE +0 -0
  8. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/__init__.py +0 -0
  9. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/cli.py +0 -0
  10. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/.flake8 +0 -0
  11. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/.python-version +0 -0
  12. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/__init__.py +0 -0
  13. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/config/__init__.py +0 -0
  14. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/decorators/__init__.py +0 -0
  15. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/decorators/base.py +0 -0
  16. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/instruments.py +0 -0
  17. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/tracing/__init__.py +0 -0
  18. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/tracing/attributes.py +0 -0
  19. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/tracing/content_allow_list.py +0 -0
  20. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/tracing/context_manager.py +0 -0
  21. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/tracing/tracing.py +0 -0
  22. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/utils/__init__.py +0 -0
  23. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/utils/in_memory_span_exporter.py +0 -0
  24. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/utils/json_encoder.py +0 -0
  25. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/utils/package_check.py +0 -0
  26. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/openllmetry_sdk/version.py +0 -0
  27. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/__init__.py +0 -0
  28. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/datasets.py +0 -0
  29. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/evaluations.py +0 -0
  30. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/log.py +0 -0
  31. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/types.py +0 -0
  32. {lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/utils.py +0 -0
{lmnr-0.4.37 → lmnr-0.4.39}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lmnr
-Version: 0.4.37
+Version: 0.4.39
 Summary: Python SDK for Laminar AI
 License: Apache-2.0
 Author: lmnr.ai
@@ -12,6 +12,32 @@ Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Provides-Extra: alephalpha
+Provides-Extra: all
+Provides-Extra: anthropic
+Provides-Extra: bedrock
+Provides-Extra: chromadb
+Provides-Extra: cohere
+Provides-Extra: google-generativeai
+Provides-Extra: groq
+Provides-Extra: haystack
+Provides-Extra: lancedb
+Provides-Extra: langchain
+Provides-Extra: llamaindex
+Provides-Extra: marqo
+Provides-Extra: milvus
+Provides-Extra: mistralai
+Provides-Extra: ollama
+Provides-Extra: openai
+Provides-Extra: pinecone
+Provides-Extra: qdrant
+Provides-Extra: replicate
+Provides-Extra: sagemaker
+Provides-Extra: together
+Provides-Extra: transformers
+Provides-Extra: vertexai
+Provides-Extra: watsonx
+Provides-Extra: weaviate
 Requires-Dist: argparse (>=1.0,<2.0)
 Requires-Dist: backoff (>=2.0,<3.0)
 Requires-Dist: deprecated (>=1.0,<2.0)
@@ -19,35 +45,35 @@ Requires-Dist: jinja2 (>=3.0,<4.0)
 Requires-Dist: opentelemetry-api (>=1.28.0)
 Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (>=1.28.0)
 Requires-Dist: opentelemetry-exporter-otlp-proto-http (>=1.28.0)
-Requires-Dist: opentelemetry-instrumentation-alephalpha (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-bedrock (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-chromadb (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-cohere (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-groq (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-haystack (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-lancedb (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-langchain (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-llamaindex (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-marqo (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-milvus (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-mistralai (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-ollama (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-openai (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-pinecone (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-qdrant (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-replicate (>=0.33.10)
+Requires-Dist: opentelemetry-instrumentation-alephalpha (>=0.33.12) ; extra == "all" or extra == "alephalpha"
+Requires-Dist: opentelemetry-instrumentation-anthropic (>=0.33.12) ; extra == "all" or extra == "anthropic"
+Requires-Dist: opentelemetry-instrumentation-bedrock (>=0.33.12) ; extra == "all" or extra == "bedrock"
+Requires-Dist: opentelemetry-instrumentation-chromadb (>=0.33.12) ; extra == "all" or extra == "chromadb"
+Requires-Dist: opentelemetry-instrumentation-cohere (>=0.33.12) ; extra == "all" or extra == "cohere"
+Requires-Dist: opentelemetry-instrumentation-google-generativeai (>=0.33.12) ; extra == "all" or extra == "google-generativeai"
+Requires-Dist: opentelemetry-instrumentation-groq (>=0.33.12) ; extra == "all" or extra == "groq"
+Requires-Dist: opentelemetry-instrumentation-haystack (>=0.33.12) ; extra == "all" or extra == "haystack"
+Requires-Dist: opentelemetry-instrumentation-lancedb (>=0.33.12) ; extra == "all" or extra == "lancedb"
+Requires-Dist: opentelemetry-instrumentation-langchain (>=0.33.12) ; extra == "all" or extra == "langchain"
+Requires-Dist: opentelemetry-instrumentation-llamaindex (>=0.33.12) ; extra == "all" or extra == "llamaindex"
+Requires-Dist: opentelemetry-instrumentation-marqo (>=0.33.12) ; extra == "all" or extra == "marqo"
+Requires-Dist: opentelemetry-instrumentation-milvus (>=0.33.12) ; extra == "all" or extra == "milvus"
+Requires-Dist: opentelemetry-instrumentation-mistralai (>=0.33.12) ; extra == "all" or extra == "mistralai"
+Requires-Dist: opentelemetry-instrumentation-ollama (>=0.33.12) ; extra == "all" or extra == "ollama"
+Requires-Dist: opentelemetry-instrumentation-openai (>=0.33.12) ; extra == "all" or extra == "openai"
+Requires-Dist: opentelemetry-instrumentation-pinecone (>=0.33.12) ; extra == "all" or extra == "pinecone"
+Requires-Dist: opentelemetry-instrumentation-qdrant (>=0.33.12) ; extra == "all" or extra == "qdrant"
+Requires-Dist: opentelemetry-instrumentation-replicate (>=0.33.12) ; extra == "all" or extra == "replicate"
 Requires-Dist: opentelemetry-instrumentation-requests (>=0.49b0,<0.50)
-Requires-Dist: opentelemetry-instrumentation-sagemaker (>=0.33.10)
+Requires-Dist: opentelemetry-instrumentation-sagemaker (>=0.33.12) ; extra == "all" or extra == "sagemaker"
 Requires-Dist: opentelemetry-instrumentation-sqlalchemy (>=0.49b0,<0.50)
 Requires-Dist: opentelemetry-instrumentation-threading (>=0.49b0,<0.50)
-Requires-Dist: opentelemetry-instrumentation-together (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-transformers (>=0.33.10)
+Requires-Dist: opentelemetry-instrumentation-together (>=0.33.12) ; extra == "all" or extra == "together"
+Requires-Dist: opentelemetry-instrumentation-transformers (>=0.33.12) ; extra == "all" or extra == "transformers"
 Requires-Dist: opentelemetry-instrumentation-urllib3 (>=0.49b0,<0.50)
-Requires-Dist: opentelemetry-instrumentation-vertexai (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.33.10)
-Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.33.10)
+Requires-Dist: opentelemetry-instrumentation-vertexai (>=0.33.12) ; extra == "all" or extra == "vertexai"
+Requires-Dist: opentelemetry-instrumentation-watsonx (>=0.33.12) ; extra == "all" or extra == "watsonx"
+Requires-Dist: opentelemetry-instrumentation-weaviate (>=0.33.12) ; extra == "all" or extra == "weaviate"
 Requires-Dist: opentelemetry-sdk (>=1.28.0)
 Requires-Dist: opentelemetry-semantic-conventions-ai (==0.4.2)
 Requires-Dist: pydantic (>=2.7,<3.0)
@@ -72,23 +98,28 @@ Check our [open-source repo](https://github.com/lmnr-ai/lmnr) and don't forget t
 
 ## Quickstart
 
-First, install the package:
+First, install the package, specifying the instrumentations you want to use.
+
+For example, to install the package with OpenAI and Anthropic instrumentations:
 
 ```sh
-pip install lmnr
+pip install 'lmnr[anthropic,openai]'
 ```
 
-And then in the code
+To install all possible instrumentations, use the following command:
+
+```sh
+pip install 'lmnr[all]'
+```
+
+Initialize Laminar in your code:
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar
 
-L.initialize(project_api_key="<PROJECT_API_KEY>")
+Laminar.initialize(project_api_key="<PROJECT_API_KEY>")
 ```
 
-This will automatically instrument most of the LLM, Vector DB, and related
-calls with OpenTelemetry-compatible instrumentation.
-
 Note that you need to only initialize Laminar once in your application.
 
 ## Instrumentation
@@ -101,9 +132,9 @@ This can be useful if you want to trace a request handler or a function which co
 ```python
 import os
 from openai import OpenAI
-from lmnr import Laminar as L, Instruments
+from lmnr import Laminar
 
-L.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"])
+Laminar.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"])
 
 client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
 
@@ -126,9 +157,7 @@ def poem_writer(topic: str):
 @observe()
 def generate_poems():
     poem1 = poem_writer(topic="laminar flow")
-    L.event("is_poem_generated", True)
     poem2 = poem_writer(topic="turbulence")
-    L.event("is_poem_generated", True)
     poems = f"{poem1}\n\n---\n\n{poem2}"
     return poems
 ```
@@ -137,18 +166,10 @@ Also, you can use `Laminar.start_as_current_span` if you want to record a chunk
 
 ```python
 def handle_user_request(topic: str):
-    with L.start_as_current_span(name="poem_writer", input=topic):
-        ...
-
+    with Laminar.start_as_current_span(name="poem_writer", input=topic):
         poem = poem_writer(topic=topic)
-
-        ...
-
-        # while within the span, you can attach laminar events to it
-        L.event("is_poem_generated", True)
-
         # Use set_span_output to record the output of the span
-        L.set_span_output(poem)
+        Laminar.set_span_output(poem)
 ```
 
 ### Automatic instrumentation
@@ -163,9 +184,9 @@ calls with OpenTelemetry-compatible instrumentation, then pass the appropriate i
 For example, if you want to only instrument OpenAI and Anthropic, then do the following:
 
 ```python
-from lmnr import Laminar as L, Instruments
+from lmnr import Laminar, Instruments
 
-L.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"], instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
+Laminar.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"], instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
 ```
 
 If you want to fully disable any kind of autoinstrumentation, pass an empty set as `instruments=set()` to `.initialize()`.
@@ -246,11 +267,11 @@ Once your pipeline target is set, you can call it from Python in just a few line
 Example use:
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar
 
-L.initialize('<YOUR_PROJECT_API_KEY>', instruments=set())
+Laminar.initialize('<YOUR_PROJECT_API_KEY>', instruments=set())
 
-result = l.run(
+result = Laminar.run(
     pipeline = 'my_pipeline_name',
     inputs = {'input_node_name': 'some_value'},
     # all environment variables
{lmnr-0.4.37 → lmnr-0.4.39}/README.md

@@ -13,23 +13,28 @@ Check our [open-source repo](https://github.com/lmnr-ai/lmnr) and don't forget t
 
 ## Quickstart
 
-First, install the package:
+First, install the package, specifying the instrumentations you want to use.
+
+For example, to install the package with OpenAI and Anthropic instrumentations:
 
 ```sh
-pip install lmnr
+pip install 'lmnr[anthropic,openai]'
 ```
 
-And then in the code
+To install all possible instrumentations, use the following command:
+
+```sh
+pip install 'lmnr[all]'
+```
+
+Initialize Laminar in your code:
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar
 
-L.initialize(project_api_key="<PROJECT_API_KEY>")
+Laminar.initialize(project_api_key="<PROJECT_API_KEY>")
 ```
 
-This will automatically instrument most of the LLM, Vector DB, and related
-calls with OpenTelemetry-compatible instrumentation.
-
 Note that you need to only initialize Laminar once in your application.
 
 ## Instrumentation
@@ -42,9 +47,9 @@ This can be useful if you want to trace a request handler or a function which co
 ```python
 import os
 from openai import OpenAI
-from lmnr import Laminar as L, Instruments
+from lmnr import Laminar
 
-L.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"])
+Laminar.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"])
 
 client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
 
@@ -67,9 +72,7 @@ def poem_writer(topic: str):
 @observe()
 def generate_poems():
     poem1 = poem_writer(topic="laminar flow")
-    L.event("is_poem_generated", True)
     poem2 = poem_writer(topic="turbulence")
-    L.event("is_poem_generated", True)
     poems = f"{poem1}\n\n---\n\n{poem2}"
     return poems
 ```
@@ -78,18 +81,10 @@ Also, you can use `Laminar.start_as_current_span` if you want to record a chunk
 
 ```python
 def handle_user_request(topic: str):
-    with L.start_as_current_span(name="poem_writer", input=topic):
-        ...
-
+    with Laminar.start_as_current_span(name="poem_writer", input=topic):
         poem = poem_writer(topic=topic)
-
-        ...
-
-        # while within the span, you can attach laminar events to it
-        L.event("is_poem_generated", True)
-
         # Use set_span_output to record the output of the span
-        L.set_span_output(poem)
+        Laminar.set_span_output(poem)
 ```
 
 ### Automatic instrumentation
@@ -104,9 +99,9 @@ calls with OpenTelemetry-compatible instrumentation, then pass the appropriate i
 For example, if you want to only instrument OpenAI and Anthropic, then do the following:
 
 ```python
-from lmnr import Laminar as L, Instruments
+from lmnr import Laminar, Instruments
 
-L.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"], instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
+Laminar.initialize(project_api_key=os.environ["LMNR_PROJECT_API_KEY"], instruments={Instruments.OPENAI, Instruments.ANTHROPIC})
 ```
 
 If you want to fully disable any kind of autoinstrumentation, pass an empty set as `instruments=set()` to `.initialize()`.
@@ -187,11 +182,11 @@ Once your pipeline target is set, you can call it from Python in just a few line
 Example use:
 
 ```python
-from lmnr import Laminar as L
+from lmnr import Laminar
 
-L.initialize('<YOUR_PROJECT_API_KEY>', instruments=set())
+Laminar.initialize('<YOUR_PROJECT_API_KEY>', instruments=set())
 
-result = l.run(
+result = Laminar.run(
     pipeline = 'my_pipeline_name',
    inputs = {'input_node_name': 'some_value'},
    # all environment variables
lmnr-0.4.39/pyproject.toml

@@ -0,0 +1,149 @@
+# Laminar Python
+
+# If you are looking for information about possible extras installations,
+# i.e. what you can pass into `pip install 'lmnr[extra1,extra2]'`, please see the
+# `[tool.poetry.extras]` section below.
+
+[project]
+name = "lmnr"
+version = "0.4.39"
+description = "Python SDK for Laminar AI"
+authors = [
+    { name = "lmnr.ai", email = "founders@lmnr.ai" }
+]
+readme = "README.md"
+requires-python = ">=3.9,<4"
+license = "Apache-2.0"
+
+[tool.poetry]
+name = "lmnr"
+version = "0.4.39"
+description = "Python SDK for Laminar AI"
+authors = ["lmnr.ai"]
+readme = "README.md"
+license = "Apache-2.0"
+
+[tool.poetry.dependencies]
+python = ">=3.9,<4"
+pydantic = "~=2.7"
+requests = "~=2.0"
+python-dotenv = "~=1.0"
+backoff = "~=2.0"
+opentelemetry-api = ">=1.28.0"
+opentelemetry-sdk = ">=1.28.0"
+opentelemetry-exporter-otlp-proto-http = ">=1.28.0"
+opentelemetry-exporter-otlp-proto-grpc = ">=1.28.0"
+opentelemetry-instrumentation-requests = "^0.49b0"
+opentelemetry-instrumentation-sqlalchemy = "^0.49b0"
+opentelemetry-instrumentation-urllib3 = "^0.49b0"
+opentelemetry-instrumentation-threading = "^0.49b0"
+opentelemetry-semantic-conventions-ai = "0.4.2"
+tenacity = ">=8.0"
+jinja2 = "~=3.0"
+deprecated = "~=1.0"
+tqdm = "~=4.0"
+argparse = "~=1.0"
+opentelemetry-instrumentation-alephalpha = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-anthropic = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-bedrock = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-chromadb = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-cohere = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-google-generativeai = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-groq = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-haystack = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-lancedb = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-langchain = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-llamaindex = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-marqo = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-milvus = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-mistralai = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-ollama = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-openai = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-pinecone = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-qdrant = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-replicate = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-sagemaker = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-together = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-transformers = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-vertexai = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-watsonx = {version = ">=0.33.12", optional = true}
+opentelemetry-instrumentation-weaviate = {version = ">=0.33.12", optional = true}
+
+# List of all possible extras. You can specify one or more of these extras
+# when installing the package, using any of the following:
+# `pip install 'lmnr[anthropic,openai]'`
+# `uv pip install 'lmnr[anthropic,openai]'`
+# `uv add lmnr --extra anthropic --extra openai`
+# `poetry add 'lmnr[anthropic,openai]'`
+
+# `all` is the group added for convenience, if you want to install all
+# the instrumentations.
+[tool.poetry.extras]
+all = [
+    "opentelemetry-instrumentation-alephalpha",
+    "opentelemetry-instrumentation-anthropic",
+    "opentelemetry-instrumentation-bedrock",
+    "opentelemetry-instrumentation-chromadb",
+    "opentelemetry-instrumentation-cohere",
+    "opentelemetry-instrumentation-google-generativeai",
+    "opentelemetry-instrumentation-groq",
+    "opentelemetry-instrumentation-haystack",
+    "opentelemetry-instrumentation-lancedb",
+    "opentelemetry-instrumentation-langchain",
+    "opentelemetry-instrumentation-llamaindex",
+    "opentelemetry-instrumentation-marqo",
+    "opentelemetry-instrumentation-milvus",
+    "opentelemetry-instrumentation-mistralai",
+    "opentelemetry-instrumentation-ollama",
+    "opentelemetry-instrumentation-openai",
+    "opentelemetry-instrumentation-pinecone",
+    "opentelemetry-instrumentation-qdrant",
+    "opentelemetry-instrumentation-replicate",
+    "opentelemetry-instrumentation-sagemaker",
+    "opentelemetry-instrumentation-together",
+    "opentelemetry-instrumentation-transformers",
+    "opentelemetry-instrumentation-vertexai",
+    "opentelemetry-instrumentation-watsonx",
+    "opentelemetry-instrumentation-weaviate"
+]
+alephalpha=["opentelemetry-instrumentation-alephalpha"]
+anthropic=["opentelemetry-instrumentation-anthropic"]
+bedrock=["opentelemetry-instrumentation-bedrock"]
+chromadb=["opentelemetry-instrumentation-chromadb"]
+cohere=["opentelemetry-instrumentation-cohere"]
+google-generativeai=["opentelemetry-instrumentation-google-generativeai"]
+groq=["opentelemetry-instrumentation-groq"]
+haystack=["opentelemetry-instrumentation-haystack"]
+lancedb=["opentelemetry-instrumentation-lancedb"]
+langchain=["opentelemetry-instrumentation-langchain"]
+llamaindex=["opentelemetry-instrumentation-llamaindex"]
+marqo=["opentelemetry-instrumentation-marqo"]
+milvus=["opentelemetry-instrumentation-milvus"]
+mistralai=["opentelemetry-instrumentation-mistralai"]
+ollama=["opentelemetry-instrumentation-ollama"]
+openai=["opentelemetry-instrumentation-openai"]
+pinecone=["opentelemetry-instrumentation-pinecone"]
+qdrant=["opentelemetry-instrumentation-qdrant"]
+replicate=["opentelemetry-instrumentation-replicate"]
+sagemaker=["opentelemetry-instrumentation-sagemaker"]
+together=["opentelemetry-instrumentation-together"]
+transformers=["opentelemetry-instrumentation-transformers"]
+vertexai=["opentelemetry-instrumentation-vertexai"]
+watsonx=["opentelemetry-instrumentation-watsonx"]
+weaviate=["opentelemetry-instrumentation-weaviate"]
+
+[tool.poetry.group.dev.dependencies]
+autopep8 = "^2.2.0"
+flake8 = "7.0.0"
+pytest = "^8.2.2"
+pytest-sugar = "1.0.0"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry.scripts]
+lmnr = "lmnr.cli:cli"
+
+[project.optional-dependencies]
+test = ["pytest"]
{lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/decorators.py

@@ -20,7 +20,6 @@ R = TypeVar("R")
 def observe(
     *,
     name: Optional[str] = None,
-    user_id: Optional[str] = None,
     session_id: Optional[str] = None,
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
     """The main decorator entrypoint for Laminar. This is used to wrap
@@ -30,9 +29,6 @@ def observe(
         name (Optional[str], optional): Name of the span. Function
             name is used if not specified.
            Defaults to None.
-        user_id (Optional[str], optional): User ID to associate
-            with the span and the following context.
-            Defaults to None.
         session_id (Optional[str], optional): Session ID to associate with the
            span and the following context. Defaults to None.
 
@@ -49,13 +45,9 @@
     if current_span != INVALID_SPAN:
         if session_id is not None:
             current_span.set_attribute(SESSION_ID, session_id)
-        if user_id is not None:
-            current_span.set_attribute(USER_ID, user_id)
     association_properties = {}
     if session_id is not None:
         association_properties["session_id"] = session_id
-    if user_id is not None:
-        association_properties["user_id"] = user_id
     update_association_properties(association_properties)
     return (
         aentity_method(name=name)(func)
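The `user_id` keyword (and its `USER_ID` attribute handling) is dropped from `observe`; `name` and `session_id` remain the keyword-only parameters. A minimal sketch of a 0.4.39-style call site, assuming `observe` is imported from the `lmnr` package as in the README examples; the function body and the session value are illustrative only:

```python
from lmnr import Laminar, observe

Laminar.initialize(project_api_key="<PROJECT_API_KEY>")


# Only `name` and `session_id` are accepted after this change; passing
# `user_id=...` here would now raise a TypeError.
@observe(name="poem_writer", session_id="session-123")
def poem_writer(topic: str) -> str:
    # Illustrative body; a real handler would call an LLM here.
    return f"A poem about {topic}"


print(poem_writer(topic="laminar flow"))
```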
{lmnr-0.4.37 → lmnr-0.4.39}/src/lmnr/sdk/laminar.py

@@ -36,7 +36,6 @@ from lmnr.openllmetry_sdk.tracing.attributes import (
     SPAN_OUTPUT,
     SPAN_PATH,
     TRACE_TYPE,
-    USER_ID,
 )
 from lmnr.openllmetry_sdk.tracing.tracing import (
     get_span_path,
@@ -588,7 +587,6 @@ class Laminar:
     def set_session(
         cls,
         session_id: Optional[str] = None,
-        user_id: Optional[str] = None,
     ):
         """Set the session and user id for the current span and the context
         (i.e. any children spans created from the current span in the current
@@ -599,29 +597,18 @@ class Laminar:
                 Useful to debug and group long-running\
                 sessions/conversations.
                 Defaults to None.
-            user_id (Optional[str], optional). Deprecated.\
-                Use `Laminar.set_metadata` instead.\
-                Custom user id.\
-                Useful for grouping spans or traces by user.\
-                Defaults to None.
         """
         association_properties = {}
         if session_id is not None:
             association_properties[SESSION_ID] = session_id
-        if user_id is not None:
-            cls.__logger.warning(
-                "User ID in set_session is deprecated and will be removed soon. "
-                "Please use `Laminar.set_metadata` instead."
-            )
-            association_properties["metadata." + USER_ID] = user_id
         update_association_properties(association_properties)
 
     @classmethod
-    def set_metadata(cls, metadata: dict[str, Any]):
+    def set_metadata(cls, metadata: dict[str, str]):
         """Set the metadata for the current trace.
 
         Args:
-            metadata (dict[str, Any]): Metadata to set for the trace. Willl be\
+            metadata (dict[str, str]): Metadata to set for the trace. Willl be\
                 sent as attributes, so must be json serializable.
         """
         props = {f"metadata.{k}": json_dumps(v) for k, v in metadata.items()}
lmnr-0.4.37/pyproject.toml

@@ -1,80 +0,0 @@
-[project]
-name = "lmnr"
-version = "0.4.37"
-description = "Python SDK for Laminar AI"
-authors = [
-    { name = "lmnr.ai", email = "founders@lmnr.ai" }
-]
-readme = "README.md"
-requires-python = ">=3.9,<4"
-license = "Apache-2.0"
-
-[tool.poetry]
-name = "lmnr"
-version = "0.4.37"
-description = "Python SDK for Laminar AI"
-authors = ["lmnr.ai"]
-readme = "README.md"
-license = "Apache-2.0"
-
-[tool.poetry.dependencies]
-python = ">=3.9,<4"
-pydantic = "~=2.7"
-requests = "~=2.0"
-python-dotenv = "~=1.0"
-backoff = "~=2.0"
-opentelemetry-api = ">=1.28.0"
-opentelemetry-sdk = ">=1.28.0"
-opentelemetry-exporter-otlp-proto-http = ">=1.28.0"
-opentelemetry-exporter-otlp-proto-grpc = ">=1.28.0"
-opentelemetry-instrumentation-requests = "^0.49b0"
-opentelemetry-instrumentation-sqlalchemy = "^0.49b0"
-opentelemetry-instrumentation-urllib3 = "^0.49b0"
-opentelemetry-instrumentation-threading = "^0.49b0"
-opentelemetry-semantic-conventions-ai = "0.4.2"
-tenacity = ">=8.0"
-jinja2 = "~=3.0"
-deprecated = "~=1.0"
-opentelemetry-instrumentation-alephalpha = ">=0.33.10"
-opentelemetry-instrumentation-anthropic = ">=0.33.10"
-opentelemetry-instrumentation-bedrock = ">=0.33.10"
-opentelemetry-instrumentation-chromadb = ">=0.33.10"
-opentelemetry-instrumentation-cohere = ">=0.33.10"
-opentelemetry-instrumentation-google-generativeai = ">=0.33.10"
-opentelemetry-instrumentation-groq = ">=0.33.10"
-opentelemetry-instrumentation-haystack = ">=0.33.10"
-opentelemetry-instrumentation-lancedb = ">=0.33.10"
-opentelemetry-instrumentation-langchain = ">=0.33.10"
-opentelemetry-instrumentation-llamaindex = ">=0.33.10"
-opentelemetry-instrumentation-marqo = ">=0.33.10"
-opentelemetry-instrumentation-milvus = ">=0.33.10"
-opentelemetry-instrumentation-mistralai = ">=0.33.10"
-opentelemetry-instrumentation-ollama = ">=0.33.10"
-opentelemetry-instrumentation-openai = ">=0.33.10"
-opentelemetry-instrumentation-pinecone = ">=0.33.10"
-opentelemetry-instrumentation-qdrant = ">=0.33.10"
-opentelemetry-instrumentation-replicate = ">=0.33.10"
-opentelemetry-instrumentation-sagemaker = ">=0.33.10"
-opentelemetry-instrumentation-together = ">=0.33.10"
-opentelemetry-instrumentation-transformers = ">=0.33.10"
-opentelemetry-instrumentation-vertexai = ">=0.33.10"
-opentelemetry-instrumentation-watsonx = ">=0.33.10"
-opentelemetry-instrumentation-weaviate = ">=0.33.10"
-tqdm = "~=4.0"
-argparse = "~=1.0"
-
-[tool.poetry.group.dev.dependencies]
-autopep8 = "^2.2.0"
-flake8 = "7.0.0"
-pytest = "^8.2.2"
-pytest-sugar = "1.0.0"
-
-[build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
-
-[tool.poetry.scripts]
-lmnr = "lmnr.cli:cli"
-
-[project.optional-dependencies]
-test = ["pytest"]