lmnr 0.2.14__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. lmnr/__init__.py +4 -4
  2. lmnr/sdk/client.py +161 -0
  3. lmnr/sdk/collector.py +177 -0
  4. lmnr/sdk/constants.py +1 -0
  5. lmnr/sdk/context.py +456 -0
  6. lmnr/sdk/decorators.py +277 -0
  7. lmnr/sdk/interface.py +339 -0
  8. lmnr/sdk/providers/__init__.py +2 -0
  9. lmnr/sdk/providers/base.py +28 -0
  10. lmnr/sdk/providers/fallback.py +131 -0
  11. lmnr/sdk/providers/openai.py +140 -0
  12. lmnr/sdk/providers/utils.py +33 -0
  13. lmnr/sdk/tracing_types.py +197 -0
  14. lmnr/sdk/types.py +69 -0
  15. lmnr/sdk/utils.py +102 -0
  16. lmnr-0.3.0.dist-info/METADATA +185 -0
  17. lmnr-0.3.0.dist-info/RECORD +21 -0
  18. lmnr/cli/__init__.py +0 -0
  19. lmnr/cli/__main__.py +0 -4
  20. lmnr/cli/cli.py +0 -232
  21. lmnr/cli/parser/__init__.py +0 -0
  22. lmnr/cli/parser/nodes/__init__.py +0 -45
  23. lmnr/cli/parser/nodes/code.py +0 -36
  24. lmnr/cli/parser/nodes/condition.py +0 -30
  25. lmnr/cli/parser/nodes/input.py +0 -25
  26. lmnr/cli/parser/nodes/json_extractor.py +0 -29
  27. lmnr/cli/parser/nodes/llm.py +0 -56
  28. lmnr/cli/parser/nodes/output.py +0 -27
  29. lmnr/cli/parser/nodes/router.py +0 -37
  30. lmnr/cli/parser/nodes/semantic_search.py +0 -53
  31. lmnr/cli/parser/nodes/types.py +0 -153
  32. lmnr/cli/parser/parser.py +0 -62
  33. lmnr/cli/parser/utils.py +0 -49
  34. lmnr/cli/zip.py +0 -16
  35. lmnr/sdk/endpoint.py +0 -186
  36. lmnr/sdk/registry.py +0 -29
  37. lmnr/sdk/remote_debugger.py +0 -148
  38. lmnr/types.py +0 -101
  39. lmnr-0.2.14.dist-info/METADATA +0 -187
  40. lmnr-0.2.14.dist-info/RECORD +0 -28
  41. {lmnr-0.2.14.dist-info → lmnr-0.3.0.dist-info}/LICENSE +0 -0
  42. {lmnr-0.2.14.dist-info → lmnr-0.3.0.dist-info}/WHEEL +0 -0
  43. {lmnr-0.2.14.dist-info → lmnr-0.3.0.dist-info}/entry_points.txt +0 -0
lmnr/sdk/remote_debugger.py DELETED
@@ -1,148 +0,0 @@
- from typing import Callable, Optional, Union
- from websockets.sync.client import connect
- import pydantic
- import websockets
- from lmnr.types import (
-     DeregisterDebuggerRequest,
-     NodeFunction,
-     NodeInput,
-     RegisterDebuggerRequest,
-     SDKError,
-     ToolCallError,
-     ToolCallRequest,
-     ToolCallResponse,
- )
- import json
- from concurrent.futures import ThreadPoolExecutor
- from threading import Thread
- import time
-
-
- class RemoteDebugger:
-     def __init__(
-         self,
-         project_api_key: str,
-         dev_session_id: str,
-         tools: Union[dict[str, NodeFunction], list[Callable[..., NodeInput]]] = [],
-     ):
-         # for simplicity and backwards compatibility, we allow the user to pass a list
-         if isinstance(tools, list):
-             tools = {f.__name__: NodeFunction(f.__name__, f) for f in tools}
-
-         self.project_api_key = project_api_key
-         self.url = "wss://api.lmnr.ai/v2/endpoint/ws"
-         self.tools = tools
-         self.stop_flag = False
-         self.session = dev_session_id
-         self.executor = ThreadPoolExecutor(5)
-         self.running_tasks = {}  # dict[str, Future] from request_id to Future
-
-     def start(self) -> Optional[str]:
-         self.stop_flag = False
-         self.executor.submit(self._run)
-         return self.session
-
-     def stop(self):
-         self.stop_flag = True
-         self.executor.shutdown()
-         self.session = None
-
-     def _run(self, backoff=1):
-         assert self.session is not None, "Session ID not set"
-         request = RegisterDebuggerRequest(debuggerSessionId=self.session)
-         try:
-             self._connect_and_run(request, backoff)
-         except Exception as e:
-             print(f"Could not connect to server. Retrying in {backoff} seconds...")
-             time.sleep(backoff)
-             self._run(min(backoff * 2, 60))
-
-     def _connect_and_run(self, request: RegisterDebuggerRequest, backoff=1):
-         with connect(
-             self.url,
-             additional_headers={"Authorization": f"Bearer {self.project_api_key}"},
-         ) as websocket:
-             websocket.send(request.model_dump_json())
-             print(self._format_session_id_and_registerd_functions())
-             req_id = None
-
-             while not self.stop_flag:
-                 # first check if any of the running tasks are done
-                 done_tasks = []
-                 for req_id, future in self.running_tasks.items():
-                     if not future.done():
-                         continue
-                     done_tasks.append(req_id)
-                     try:
-                         response = future.result()
-                     except Exception as e:
-                         error_message = (
-                             "Error occurred while running tool" + f"{tool.__name__}: {e}"
-                         )
-                         e = ToolCallError(error=error_message, reqId=req_id)
-                         websocket.send(e.model_dump_json())
-                         continue
-                     formatted_response = None
-                     try:
-                         formatted_response = ToolCallResponse(
-                             reqId=req_id, response=response
-                         )
-                     except pydantic.ValidationError:
-                         formatted_response = ToolCallResponse(
-                             reqId=req_id, response=str(response)
-                         )
-                     websocket.send(formatted_response.model_dump_json())
-                 for req_id in done_tasks:
-                     del self.running_tasks[req_id]
-                 try:
-                     # blocks the thread until a message
-                     # is received or a timeout (0.1 seconds) occurs
-                     message = websocket.recv(0.1)
-                 except TimeoutError:
-                     continue
-                 except websockets.exceptions.ConnectionClosedError:
-                     print("Connection interrupted by server. Trying to reconnect...")
-                     self._run()
-                 try:
-                     tool_call = ToolCallRequest.model_validate_json(message)
-                     req_id = tool_call.reqId
-                 except Exception:
-                     raise SDKError(f"Invalid message received:\n{message}")
-                 matching_tool = self.tools.get(tool_call.toolCall.function.name)
-                 if matching_tool is None:
-                     error_message = (
-                         f"Tool {tool_call.toolCall.function.name} not found"
-                         + ". Registered tools: "
-                         + ", ".join(self.tools.keys())
-                     )
-                     e = ToolCallError(error=error_message, reqId=req_id)
-                     websocket.send(e.model_dump_json())
-                     continue
-                 tool = matching_tool.function
-
-                 # default the arguments to an empty dictionary
-                 arguments = {}
-                 try:
-                     arguments = json.loads(tool_call.toolCall.function.arguments)
-                 except Exception:
-                     pass
-                 self.running_tasks[tool_call.reqId] = self.executor.submit(tool, **arguments)
-             websocket.send(
-                 DeregisterDebuggerRequest(
-                     debuggerSessionId=self.session, deregister=True
-                 ).model_dump_json()
-             )
-
-     def _format_session_id_and_registerd_functions(self) -> str:
-         registered_functions = ",\n".join(["- " + k for k in self.tools.keys()])
-         return f"""
- ========================================
- Dev Session ID:
- {self.session}
- ========================================
-
- Registered functions:
- {registered_functions}
-
- ========================================
- """
lmnr/types.py DELETED
@@ -1,101 +0,0 @@
- import requests
- import pydantic
- import uuid
- from typing import Callable, Union, Optional
-
-
- class ChatMessage(pydantic.BaseModel):
-     role: str
-     content: str
-
-
- class ConditionedValue(pydantic.BaseModel):
-     condition: str
-     value: "NodeInput"
-
-
- NodeInput = Union[str, list[ChatMessage], ConditionedValue]  # TypeAlias
-
-
- class EndpointRunRequest(pydantic.BaseModel):
-     inputs: dict[str, NodeInput]
-     endpoint: str
-     env: dict[str, str] = pydantic.Field(default_factory=dict)
-     metadata: dict[str, str] = pydantic.Field(default_factory=dict)
-
-
- class EndpointRunResponse(pydantic.BaseModel):
-     outputs: dict[str, dict[str, NodeInput]]
-     run_id: str
-
-
- class EndpointRunError(Exception):
-     error_code: str
-     error_message: str
-
-     def __init__(self, response: requests.Response):
-         try:
-             resp_json = response.json()
-             self.error_code = resp_json["error_code"]
-             self.error_message = resp_json["error_message"]
-             super().__init__(self.error_message)
-         except Exception:
-             super().__init__(response.text)
-
-     def __str__(self) -> str:
-         try:
-             return str(
-                 {"error_code": self.error_code, "error_message": self.error_message}
-             )
-         except Exception:
-             return super().__str__()
-
-
- class SDKError(Exception):
-     def __init__(self, error_message: str):
-         super().__init__(error_message)
-
-
- class ToolCallFunction(pydantic.BaseModel):
-     name: str
-     arguments: str
-
-
- class ToolCall(pydantic.BaseModel):
-     id: Optional[str]
-     type: Optional[str]
-     function: ToolCallFunction
-
-
- # TODO: allow snake_case and manually convert to camelCase
- class ToolCallRequest(pydantic.BaseModel):
-     reqId: uuid.UUID
-     toolCall: ToolCall
-
-
- class ToolCallResponse(pydantic.BaseModel):
-     reqId: uuid.UUID
-     response: NodeInput
-
-
- class ToolCallError(pydantic.BaseModel):
-     reqId: uuid.UUID
-     error: str
-
-
- class RegisterDebuggerRequest(pydantic.BaseModel):
-     debuggerSessionId: str
-
-
- class DeregisterDebuggerRequest(pydantic.BaseModel):
-     debuggerSessionId: str
-     deregister: bool
-
-
- class NodeFunction:
-     node_name: str
-     function: Callable[..., NodeInput]
-
-     def __init__(self, node_name: str, function: Callable[..., NodeInput]):
-         self.node_name = node_name
-         self.function = function
lmnr-0.2.14.dist-info/METADATA DELETED
@@ -1,187 +0,0 @@
- Metadata-Version: 2.1
- Name: lmnr
- Version: 0.2.14
- Summary: Python SDK for Laminar AI
- License: Apache-2.0
- Author: lmnr.ai
- Requires-Python: >=3.9,<4.0
- Classifier: License :: OSI Approved :: Apache Software License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.9
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: black (>=24.4.2,<25.0.0)
- Requires-Dist: click (>=8.1.7,<9.0.0)
- Requires-Dist: cookiecutter (>=2.6.0,<3.0.0)
- Requires-Dist: lmnr-baml (>=0.40.1,<0.41.0)
- Requires-Dist: pydantic (>=2.7.4,<3.0.0)
- Requires-Dist: pystache (>=0.6.5,<0.7.0)
- Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
- Requires-Dist: requests (>=2.32.3,<3.0.0)
- Requires-Dist: watchdog (>=4.0.2,<5.0.0)
- Requires-Dist: websockets (>=12.0,<13.0)
- Description-Content-Type: text/markdown
-
- # Laminar AI
-
- This repo provides the core for code generation, the Laminar CLI, and the Laminar SDK.
-
- ## Quickstart
- ```sh
- python3 -m venv .myenv
- source .myenv/bin/activate  # or use your favorite env management tool
-
- pip install lmnr
- ```
-
- ## Features
-
- - Make Laminar endpoint calls from your Python code
- - Make Laminar endpoint calls that can run your own functions as tools
- - CLI to generate code from pipelines you build on Laminar or execute your own functions while you test your flows in workshop
-
- ## Making Laminar endpoint calls
-
- After you are ready to use your pipeline in your code, deploy it in Laminar following the [docs](https://docs.lmnr.ai/pipeline/run-save-deploy#deploying-a-pipeline-version).
-
- Once your pipeline is deployed, you can call it from Python in just a few lines.
-
- Example use:
-
- ```python
- from lmnr import Laminar
-
- l = Laminar('<YOUR_PROJECT_API_KEY>')
- result = l.run(
-     endpoint = 'my_endpoint_name',
-     inputs = {'input_node_name': 'some_value'},
-     # all environment variables
-     env = {'OPENAI_API_KEY': 'sk-some-key'},
-     # any metadata to attach to this run's trace
-     metadata = {'session_id': 'your_custom_session_id'}
- )
- ```
-
- Resulting in:
-
- ```python
- >>> result
- EndpointRunResponse(
-     outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}},
-     # useful to locate your trace
-     run_id='53b012d5-5759-48a6-a9c5-0011610e3669'
- )
- ```
-
- ## Making calls to pipelines that run your own logic
-
- If your pipeline contains tool call nodes, they will be able to call your local code.
- The only difference is that you need to pass references
- to the functions you want to call right into our SDK.
-
- Example use:
-
- ```python
- from lmnr import Laminar, NodeInput
-
- # adding **kwargs is safer, in case an LLM produces more arguments than needed
- def my_tool(arg1: str, arg2: str, **kwargs) -> NodeInput:
-     return f'{arg1}&{arg2}'
-
- l = Laminar('<YOUR_PROJECT_API_KEY>')
- result = l.run(
-     endpoint = 'my_endpoint_name',
-     inputs = {'input_node_name': 'some_value'},
-     # all environment variables
-     env = {'OPENAI_API_KEY': '<YOUR_MODEL_PROVIDER_KEY>'},
-     # any metadata to attach to this run's trace
-     metadata = {'session_id': 'your_custom_session_id'},
-     # specify as many tools as needed.
-     # Each tool name must match tool node name in the pipeline
-     tools=[my_tool]
- )
- ```
-
- ## LaminarRemoteDebugger
-
- If your pipeline contains local call nodes, they will be able to call code right on your machine.
-
- ### Step by step instructions to connect to Laminar:
-
- #### 1. Create your pipeline with function call nodes
-
- Add function calls to your pipeline; these are signature definitions of your functions.
-
- #### 2. Implement the functions
-
- At the root level, create a file: `pipeline.py`
-
- Annotate functions with the same name.
-
- Example:
-
- ```python
- from lmnr import Pipeline
-
- lmnr = Pipeline()
-
- @lmnr.func("foo")  # the node in the pipeline is called foo and has one parameter arg
- def custom_logic(arg: str) -> str:
-     return arg * 10
- ```
-
- #### 3. Link lmnr.ai workshop to your machine
-
- 1. At the root level, create a `.env` file if one does not already exist
- 1. In project settings, create or copy a project api key.
- 1. Add an entry in `.env` with: `LMNR_PROJECT_API_KEY=s0meKey...`
- 1. In project settings, create or copy a dev session. These are your individual sessions.
- 1. Add an entry in `.env` with: `LMNR_DEV_SESSION_ID=01234567-89ab-cdef-0123-4567890ab`
-
- #### 4. Run the dev environment
-
- ```sh
- lmnr dev
- ```
-
- This will start a session, try to persist it, and reload the session on file changes.
-
- ## CLI for code generation
-
- ### Basic usage
-
- ```
- lmnr pull <pipeline_name> <pipeline_version_name> --project-api-key <PROJECT_API_KEY>
- ```
-
- Note that the `lmnr` CLI command will only be available from within the virtual environment
- where you have installed the package.
-
- To import your pipeline:
- ```python
- # submodule with the name of your pipeline will be generated in lmnr_engine.pipelines
- from lmnr_engine.pipelines.my_custom_pipeline import MyCustomPipeline
-
-
- pipeline = MyCustomPipeline()
- res = pipeline.run(
-     inputs={
-         "instruction": "Write me a short linkedin post about a dev tool for LLM developers"
-     },
-     env={
-         "OPENAI_API_KEY": <OPENAI_API_KEY>,
-     }
- )
- print(f"Pipeline run result:\n{res}")
- ```
-
- ### Current functionality
- - Supports graph generation for graphs with the following nodes: Input, Output, LLM, Router, Code.
- - For LLM nodes, it only supports OpenAI and Anthropic models. Structured output in LLM nodes will be supported soon.
-
- ## PROJECT_API_KEY
-
- Read more [here](https://docs.lmnr.ai/api-reference/introduction#authentication) on how to get `PROJECT_API_KEY`.
-
lmnr-0.2.14.dist-info/RECORD DELETED
@@ -1,28 +0,0 @@
- lmnr/__init__.py,sha256=BBJ87AiHC_OpaYOCzF8QSsf7eO3LlJPOCBXHNKurbE4,235
- lmnr/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lmnr/cli/__main__.py,sha256=8hDtWlaFZK24KhfNq_ZKgtXqYHsDQDetukOCMlsbW0Q,59
- lmnr/cli/cli.py,sha256=Wz2ir-Mb0ddiNDTuA-9N7QSJzxWnMHlO7-MP2PK0-PU,7615
- lmnr/cli/parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lmnr/cli/parser/nodes/__init__.py,sha256=2MkPdKulb1kuNe6aT71CaqBA8iBrXyb5pq5bu_EvCb8,1052
- lmnr/cli/parser/nodes/code.py,sha256=8lTPBibUzaw_t-9QoPljhxH3KA4CLn9DJjA-iWpprOA,933
- lmnr/cli/parser/nodes/condition.py,sha256=AJny0ILXbSy1hTwsRvZvDUqts9INNx63yQSkD7Dp7KU,740
- lmnr/cli/parser/nodes/input.py,sha256=o8EfCmBbNyQL8FzmAtgnNDFlWqZmRAgkbw4HzKXZepU,539
- lmnr/cli/parser/nodes/json_extractor.py,sha256=CnVwZ-wU_Ro4WkJLw9Uk_SS3yvZ66UPa5mK4JdkM8w4,723
- lmnr/cli/parser/nodes/llm.py,sha256=Wpmo9cfNiYN9DRbj7oBS6RYcKXLwlGtF6RdF4jFQm5I,1866
- lmnr/cli/parser/nodes/output.py,sha256=1XBppSscxM01kfZhE9oOh2GgdCVzyPVe2RAxLI5HmUc,665
- lmnr/cli/parser/nodes/router.py,sha256=dmCx4ho8_GdFJXQa8UevMf_uEP7AKBv_MJ2zpLC6Vck,894
- lmnr/cli/parser/nodes/semantic_search.py,sha256=DWDPpV78XZ7vPIaPd86FbeDFAnKah4e61M1TOzwnt84,1352
- lmnr/cli/parser/nodes/types.py,sha256=OVXj-iMEDY9nPKCX1-zddtoszZcUL3CXYYryI7O3et0,6094
- lmnr/cli/parser/parser.py,sha256=yDa-ysAkh6si_hHU8Gw8EdtNWc4pFc5RbvgWEXGEPys,2370
- lmnr/cli/parser/utils.py,sha256=1oy6BApHXOF7BTXbP8v3Oi9bwOdWZjoxDlRIOfXVxro,1169
- lmnr/cli/zip.py,sha256=u2-LcYtQdZ_FIW0-PM-WGjclNPoB8v6OecrI79PyLPw,607
- lmnr/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- lmnr/sdk/endpoint.py,sha256=0HjcxMUcJz-klFZO2f5xtTaoLjcaEb8vrJ_YldTWUc8,7467
- lmnr/sdk/registry.py,sha256=sEYQFOjO72YvgBSEkBrvoewFExoyBzx6nELgBarvD6Y,755
- lmnr/sdk/remote_debugger.py,sha256=c0a6_YZJmSmIyTL8Ybu6Ln4k8KiybwqcGqWude_Pi10,5756
- lmnr/types.py,sha256=3HpLBQZr6F5YMISHYLnzzyrTwUttNqJxpyobw31YYJQ,2347
- lmnr-0.2.14.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
- lmnr-0.2.14.dist-info/METADATA,sha256=RVGrQzxSx9qTIpSIKf--UYt1QMjRxqCulYOkXacqu2Y,5565
- lmnr-0.2.14.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- lmnr-0.2.14.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
- lmnr-0.2.14.dist-info/RECORD,,
File without changes
File without changes