lmnr 0.2.3.1__py3-none-any.whl → 0.2.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lmnr/sdk/remote_debugger.py CHANGED
@@ -1,6 +1,7 @@
  from typing import Callable, Optional
  from websockets.sync.client import connect
- from lmnr.types import DeregisterDebuggerRequest, NodeInput, RegisterDebuggerRequest, SDKError, ToolCall
+ import websockets
+ from lmnr.types import DeregisterDebuggerRequest, NodeInput, RegisterDebuggerRequest, SDKError, ToolCall, ToolCallError
  import uuid
  import json
  from threading import Thread
@@ -47,6 +48,9 @@ class RemoteDebugger:
                  message = websocket.recv(3)
              except TimeoutError:
                  continue
+             except websockets.exceptions.ConnectionClosedError:
+                 print("Connection closed. Please restart the debugger.")
+                 return
              try:
                  tool_call = ToolCall.model_validate_json(message)
              except:
@@ -56,11 +60,11 @@ class RemoteDebugger:
                  if tool.__name__ == tool_call.function.name
              ]
              if not matching_tools:
-                 raise SDKError(
-                     f'Tool {tool_call.function.name} not found.'
-                     ' Registered tools: '
-                     f'{", ".join([tool.__name__ for tool in self.tools])}'
-                 )
+                 error_message = f'Tool {tool_call.function.name} not found.' +\
+                     f' Registered tools: {", ".join([tool.__name__ for tool in self.tools])}'
+                 e = ToolCallError(error=error_message)
+                 websocket.send(e.model_dump_json())
+                 continue
              tool = matching_tools[0]
              if tool.__name__ == tool_call.function.name:
                  # default the arguments to an empty dictionary
@@ -69,8 +73,13 @@ class RemoteDebugger:
                      arguments = json.loads(tool_call.function.arguments)
                  except:
                      pass
-                 response = tool(**arguments)
-                 websocket.send(json.dumps(response))
+                 try:
+                     response = tool(**arguments)  # of type NodeInput
+                     websocket.send(json.dumps(response))
+                 except Exception as e:
+                     error_message = f'Error occurred while running tool {tool.__name__}: {e}'
+                     e = ToolCallError(error=error_message)
+                     websocket.send(e.model_dump_json())
          websocket.send(DeregisterDebuggerRequest(debuggerSessionId=self.session, deregister=True).model_dump_json())

      def _generate_session_id(self) -> str:
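
The net effect of these changes is that a missing or failing tool no longer kills the debugger loop: instead of raising `SDKError` locally, the error is wrapped in a `ToolCallError` and sent back over the websocket, and the loop continues. A minimal sketch of the new error path (the `send` callback is hypothetical and stands in for `websocket.send`):

```python
import json

from lmnr.types import ToolCallError


def flaky_tool(**kwargs):
    # stand-in for a user tool that raises at runtime
    raise ValueError("upstream API timed out")


def run_tool(tool, arguments, send):
    # mirrors the new try/except block in RemoteDebugger
    try:
        response = tool(**arguments)  # expected to be a NodeInput
        send(json.dumps(response))
    except Exception as exc:
        error_message = f'Error occurred while running tool {tool.__name__}: {exc}'
        send(ToolCallError(error=error_message).model_dump_json())


run_tool(flaky_tool, {}, print)
# {"error":"Error occurred while running tool flaky_tool: upstream API timed out"}
```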
lmnr/types.py CHANGED
@@ -39,8 +39,8 @@ class EndpointRunError(Exception):
          return super().__str__()

  class SDKError(Exception):
-     def __init__(self, error_mesasge: str):
-         super().__init__(error_mesasge)
+     def __init__(self, error_message: str):
+         super().__init__(error_message)

  class ToolCallRequest(pydantic.BaseModel):
      name: str
@@ -51,6 +51,10 @@ class ToolCall(pydantic.BaseModel):
      type: Optional[str]
      function: ToolCallRequest

+ ToolCallResponse = NodeInput
+ class ToolCallError(pydantic.BaseModel):
+     error: str
+
  # TODO: allow snake_case and manually convert to camelCase
  class RegisterDebuggerRequest(pydantic.BaseModel):
      debuggerSessionId: str
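
`ToolCallError` is a plain pydantic model, so the payload the debugger sends on failure is simply its JSON dump. A quick sketch of what goes over the wire (assuming pydantic v2, as pinned in the package metadata):

```python
from lmnr.types import ToolCallError

err = ToolCallError(error="Tool my_tool not found. Registered tools: my_other_tool")
print(err.model_dump_json())
# {"error":"Tool my_tool not found. Registered tools: my_other_tool"}
```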
lmnr-0.2.5.dist-info/METADATA ADDED
@@ -0,0 +1,186 @@
+ Metadata-Version: 2.1
+ Name: lmnr
+ Version: 0.2.5
+ Summary: Python SDK for Laminar AI
+ License: Apache-2.0
+ Author: lmnr.ai
+ Requires-Python: >=3.9,<4.0
+ Classifier: License :: OSI Approved :: Apache Software License
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.9
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Requires-Dist: black (>=24.4.2,<25.0.0)
+ Requires-Dist: click (>=8.1.7,<9.0.0)
+ Requires-Dist: cookiecutter (>=2.6.0,<3.0.0)
+ Requires-Dist: pydantic (>=2.7.4,<3.0.0)
+ Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
+ Requires-Dist: requests (>=2.32.3,<3.0.0)
+ Requires-Dist: urllib3 (==1.26.6)
+ Requires-Dist: websockets (>=12.0,<13.0)
+ Description-Content-Type: text/markdown
+
+ # Python SDK for Laminar AI
+
+ ## Quickstart
+ ```sh
+ python3 -m venv .myenv
+ source .myenv/bin/activate  # or use your favorite env management tool
+
+ pip install lmnr
+ ```
+
+ ## Features
+
+ - Make Laminar endpoint calls from your Python code
+ - Make Laminar endpoint calls that can run your own functions as tools
+ - CLI to generate code from pipelines you build on Laminar
+ - `LaminarRemoteDebugger` to execute your own functions while you test your flows in the workshop
+
+ ## Making Laminar endpoint calls
+
+ When you are ready to use your pipeline in your code, deploy it in Laminar following the [docs](https://docs.lmnr.ai/pipeline/run-save-deploy#deploying-a-pipeline-version).
+
+ Once your pipeline is deployed, you can call it from Python in just a few lines.
+
+ Example use:
+
+ ```python
+ from lmnr import Laminar
+
+ l = Laminar('<YOUR_PROJECT_API_KEY>')
+ result = l.run(
+     endpoint = 'my_endpoint_name',
+     inputs = {'input_node_name': 'some_value'},
+     # all environment variables
+     env = {'OPENAI_API_KEY': 'sk-some-key'},
+     # any metadata to attach to this run's trace
+     metadata = {'session_id': 'your_custom_session_id'}
+ )
+ ```
+
+ Resulting in:
+
+ ```python
+ >>> result
+ EndpointRunResponse(
+     outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}},
+     # useful to locate your trace
+     run_id='53b012d5-5759-48a6-a9c5-0011610e3669'
+ )
+ ```
+
+ ## Making calls to pipelines that run your own logic
+
+ If your pipeline contains tool call nodes, they can call your local code.
+ The only difference is that you need to pass references
+ to the functions you want to call right into our SDK.
+
+ Example use:
+
+ ```python
+ from lmnr import Laminar, NodeInput
+
+ # adding **kwargs is safer, in case an LLM produces more arguments than needed
+ def my_tool(arg1: str, arg2: str, **kwargs) -> NodeInput:
+     return f'{arg1}&{arg2}'
+
+ l = Laminar('<YOUR_PROJECT_API_KEY>')
+ result = l.run(
+     endpoint = 'my_endpoint_name',
+     inputs = {'input_node_name': 'some_value'},
+     # all environment variables
+     env = {'OPENAI_API_KEY': '<YOUR_MODEL_PROVIDER_KEY>'},
+     # any metadata to attach to this run's trace
+     metadata = {'session_id': 'your_custom_session_id'},
+     # specify as many tools as needed.
+     # Each tool name must match the tool node name in the pipeline
+     tools=[my_tool]
+ )
+ ```
+
+ ## LaminarRemoteDebugger
+
+ If your pipeline contains tool call nodes, they can call your local code.
+ If you want to test them from the Laminar workshop in your browser, you can attach to your
+ locally running debugger.
+
+ ### Step-by-step instructions for using `LaminarRemoteDebugger`
+
+ #### 1. Create your pipeline with tool call nodes
+
+ Add tool calls to your pipeline; node names must match the functions you want to call.
+
+ #### 2. Start LaminarRemoteDebugger in your code
+
+ Example:
+
+ ```python
+ from lmnr import LaminarRemoteDebugger, NodeInput
+
+ # adding **kwargs is safer, in case an LLM produces more arguments than needed
+ def my_tool(arg1: str, arg2: str, **kwargs) -> NodeInput:
+     return f'{arg1}&{arg2}'
+
+ debugger = LaminarRemoteDebugger('<YOUR_PROJECT_API_KEY>', [my_tool])
+ session_id = debugger.start()  # the session id will also be printed to console
+ ```
+
+ This will establish a connection with the Laminar API and allow the pipeline execution
+ to call your local functions.
+
+ #### 3. Link the lmnr.ai workshop to your debugger
+
+ Set the `DEBUGGER_SESSION_ID` environment variable in your pipeline.
+
+ #### 4. Run and experiment
+
+ You can run as many sessions as you need, experimenting with your flows.
+
+ #### 5. Stop the debugger
+
+ To stop the session, run
+
+ ```python
+ debugger.stop()
+ ```
+
+ ## CLI for code generation
+
+ ### Basic usage
+
+ ```
+ lmnr pull <pipeline_name> <pipeline_version_name> --project-api-key <PROJECT_API_KEY>
+ ```
+
+ Note that the `lmnr` CLI command is only available from within the virtual environment
+ where you have installed the package.
+
+ To import your pipeline:
+ ```python
+ # a submodule with the name of your pipeline will be generated in lmnr_engine.pipelines
+ from lmnr_engine.pipelines.my_custom_pipeline import MyCustomPipeline
+
+
+ pipeline = MyCustomPipeline()
+ res = pipeline.run(
+     inputs={
+         "instruction": "Write me a short linkedin post about a dev tool for LLM developers"
+     },
+     env={
+         "OPENAI_API_KEY": "<OPENAI_API_KEY>",
+     }
+ )
+ print(f"RESULT:\n{res}")
+ ```
+
+ ### Current functionality
+ - Supports graph generation only for graphs with Input, Output, and LLM nodes
+ - For LLM nodes, only OpenAI and Anthropic models are supported, and structured output is not supported
+
+ ## PROJECT_API_KEY
+
+ Read more [here](https://docs.lmnr.ai/api-reference/introduction#authentication) on how to get a `PROJECT_API_KEY`.
+
lmnr-0.2.5.dist-info/RECORD CHANGED
@@ -19,10 +19,10 @@ lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipel
  lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/{{cookiecutter.pipeline_dir_name}}.py,sha256=WG-ZMofPpGXCx5jdWVry3_XBzcKjqn8ZycFSiWEOBPg,2858
  lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/types.py,sha256=iWuflMV7TiaBPs6-B-BlrovvWpZgHGGHK0v8rSqER7A,997
  lmnr/sdk/endpoint.py,sha256=tT6-w-mwbh4BAwnj5G0pCVE_Sz8EUzZmpBtacm_T2pE,6359
- lmnr/sdk/remote_debugger.py,sha256=lAL7t0DXADLMTLsh6AN3rqVYf_A-aeG_mMbyU-G2wTk,3299
- lmnr/types.py,sha256=Pi6R5qMmN4fiwZR0TzP4rxfSP6rFv4iouZ_OKnJsmhA,1685
- lmnr-0.2.3.1.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
- lmnr-0.2.3.1.dist-info/METADATA,sha256=pOu2E4OUgWKruxNR6b5DyeLHJWlZji9EapcC0JqEQNE,2260
- lmnr-0.2.3.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- lmnr-0.2.3.1.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
- lmnr-0.2.3.1.dist-info/RECORD,,
+ lmnr/sdk/remote_debugger.py,sha256=pNSmmk-KAAOUygKQAeTB1DbB3vvlpHL8JLwPBpBhBwA,3892
+ lmnr/types.py,sha256=Y41GGiCVemBUCTKedCwDhxjdsoO8uENr3LRwfdp9MI0,1771
+ lmnr-0.2.5.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
+ lmnr-0.2.5.dist-info/METADATA,sha256=BgYsaYsxprvRZ5-RxZk8DQdSTKmF51hOvU27hg_0wn4,5492
+ lmnr-0.2.5.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+ lmnr-0.2.5.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
+ lmnr-0.2.5.dist-info/RECORD,,
lmnr-0.2.3.1.dist-info/METADATA DELETED
@@ -1,78 +0,0 @@
- Metadata-Version: 2.1
- Name: lmnr
- Version: 0.2.3.1
- Summary: Python SDK for Laminar AI
- License: Apache-2.0
- Author: lmnr.ai
- Requires-Python: >=3.9,<4.0
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: black (>=24.4.2,<25.0.0)
- Requires-Dist: click (>=8.1.7,<9.0.0)
- Requires-Dist: cookiecutter (>=2.6.0,<3.0.0)
- Requires-Dist: pydantic (>=2.7.4,<3.0.0)
- Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
- Requires-Dist: requests (>=2.32.3,<3.0.0)
- Requires-Dist: urllib3 (==1.26.6)
- Requires-Dist: websockets (>=12.0,<13.0)
- Description-Content-Type: text/markdown
-
- # Python SDK for Laminar AI
-
- Example use:
-
- ```python
- from lmnr import Laminar
-
- l = Laminar('<YOUR_PROJECT_API_KEY>')
- result = l.run(
-     endpoint = 'my_endpoint_name',
-     inputs = {'input_node_name': 'some_value'},
-     env = {'OPENAI_API_KEY': 'sk-some-key'},
-     metadata = {'session_id': 'your_custom_session_id'}
- )
- ```
-
- Resulting in:
-
- ```python
- >>> result
- EndpointRunResponse(outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}}, run_id='53b012d5-5759-48a6-a9c5-0011610e3669')
- ```
-
- ## CLI for code generation
-
- ### Basic usage
-
- ```
- lmnr pull <pipeline_name> <pipeline_version_name> --project-api-key <PROJECT_API_KEY>
- ```
-
- Read more [here](https://docs.lmnr.ai/api-reference/introduction#authentication) on how to get `PROJECT_API_KEY`.
-
- To import your pipeline
- ```python
- # submodule with the name of your pipeline will be generated in lmnr_engine.pipelines
- from lmnr_engine.pipelines.my_custom_pipeline import MyCustomPipeline
-
-
- pipeline = MyCustomPipeline()
- res = pipeline.run(
-     inputs={
-         "instruction": "Write me a short linked post about dev tool for LLM developers which they'll love"
-     },
-     env={
-         "OPENAI_API_KEY": <OPENAI_API_KEY>,
-     }
- )
- print(f"RESULT:\n{res}")
- ```
-
- ### Current functionality
- - Supports graph generation for graphs with Input, Output, and LLM nodes only
- - For LLM nodes, it only supports OpenAI and Anthropic models and doesn't support structured output
-