lmnr 0.2.13__tar.gz → 0.2.14__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lmnr-0.2.13 → lmnr-0.2.14}/PKG-INFO +29 -23
- {lmnr-0.2.13 → lmnr-0.2.14}/README.md +26 -21
- {lmnr-0.2.13 → lmnr-0.2.14}/pyproject.toml +5 -4
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/cli.py +73 -20
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/sdk/remote_debugger.py +51 -39
- {lmnr-0.2.13 → lmnr-0.2.14}/LICENSE +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/__init__.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/__init__.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/__main__.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/__init__.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/__init__.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/code.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/condition.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/input.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/json_extractor.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/llm.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/output.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/router.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/semantic_search.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/nodes/types.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/parser.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/parser/utils.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/zip.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/sdk/__init__.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/sdk/endpoint.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/sdk/registry.py +0 -0
- {lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/types.py +0 -0
{lmnr-0.2.13 → lmnr-0.2.14}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: lmnr
-Version: 0.2.13
+Version: 0.2.14
 Summary: Python SDK for Laminar AI
 License: Apache-2.0
 Author: lmnr.ai
@@ -14,11 +14,12 @@ Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: black (>=24.4.2,<25.0.0)
 Requires-Dist: click (>=8.1.7,<9.0.0)
 Requires-Dist: cookiecutter (>=2.6.0,<3.0.0)
-Requires-Dist: lmnr-baml (>=0.40.
+Requires-Dist: lmnr-baml (>=0.40.1,<0.41.0)
 Requires-Dist: pydantic (>=2.7.4,<3.0.0)
 Requires-Dist: pystache (>=0.6.5,<0.7.0)
 Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
 Requires-Dist: requests (>=2.32.3,<3.0.0)
+Requires-Dist: watchdog (>=4.0.2,<5.0.0)
 Requires-Dist: websockets (>=12.0,<13.0)
 Description-Content-Type: text/markdown

@@ -38,8 +39,7 @@ pip install lmnr

 - Make Laminar endpoint calls from your Python code
 - Make Laminar endpoint calls that can run your own functions as tools
-- CLI to generate code from pipelines you build on Laminar
-- `LaminarRemoteDebugger` to execute your own functions while you test your flows in workshop
+- CLI to generate code from pipelines you build on Laminar or execute your own functions while you test your flows in workshop

 ## Making Laminar endpoint calls

@@ -106,41 +106,47 @@ result = l.run(

 ## LaminarRemoteDebugger

-If your pipeline contains
-
-
+If your pipeline contains local call nodes, they will be able to call code right on your machine.
+
+### Step by step instructions to connect to Laminar:

-
+#### 1. Create your pipeline with function call nodes

-
+Add function calls to your pipeline; these are signature definitions of your functions

-
+#### 2. Implement the functions

-
+At the root level, create a file: `pipeline.py`
+
+Annotate functions with the same name.

 Example:

 ```python
-from lmnr import
+from lmnr import Pipeline

-
-def my_tool(arg1: string, arg2: string, **kwargs) -> NodeInput:
-    return f'{arg1}&{arg2}'
+lmnr = Pipeline()

-
-
+@lmnr.func("foo") # the node in the pipeline is called foo and has one parameter arg
+def custom_logic(arg: str) -> str:
+    return arg * 10
 ```

-
-to call your local functions.
+#### 3. Link lmnr.ai workshop to your machine

-
+1. At the root level, create a `.env` file if not already
+1. In project settings, create or copy a project api key.
+1. Add an entry in `.env` with: `LMNR_PROJECT_API_KEY=s0meKey...`
+1. In project settings create or copy a dev session. These are your individual sessions.
+1. Add an entry in `.env` with: `LMNR_DEV_SESSION_ID=01234567-89ab-cdef-0123-4567890ab`

-
+#### 4. Run the dev environment

-
+```sh
+lmnr dev
+```

-
+This will start a session, try to persist it, and reload the session on files change.

 ## CLI for code generation

{lmnr-0.2.13 → lmnr-0.2.14}/README.md

@@ -14,8 +14,7 @@ pip install lmnr

 - Make Laminar endpoint calls from your Python code
 - Make Laminar endpoint calls that can run your own functions as tools
-- CLI to generate code from pipelines you build on Laminar
-- `LaminarRemoteDebugger` to execute your own functions while you test your flows in workshop
+- CLI to generate code from pipelines you build on Laminar or execute your own functions while you test your flows in workshop

 ## Making Laminar endpoint calls

@@ -82,41 +81,47 @@ result = l.run(

 ## LaminarRemoteDebugger

-If your pipeline contains
-
-
+If your pipeline contains local call nodes, they will be able to call code right on your machine.
+
+### Step by step instructions to connect to Laminar:

-
+#### 1. Create your pipeline with function call nodes

-
+Add function calls to your pipeline; these are signature definitions of your functions

-
+#### 2. Implement the functions

-
+At the root level, create a file: `pipeline.py`
+
+Annotate functions with the same name.

 Example:

 ```python
-from lmnr import
+from lmnr import Pipeline

-
-def my_tool(arg1: string, arg2: string, **kwargs) -> NodeInput:
-    return f'{arg1}&{arg2}'
+lmnr = Pipeline()

-
-
+@lmnr.func("foo") # the node in the pipeline is called foo and has one parameter arg
+def custom_logic(arg: str) -> str:
+    return arg * 10
 ```

-
-to call your local functions.
+#### 3. Link lmnr.ai workshop to your machine

-
+1. At the root level, create a `.env` file if not already
+1. In project settings, create or copy a project api key.
+1. Add an entry in `.env` with: `LMNR_PROJECT_API_KEY=s0meKey...`
+1. In project settings create or copy a dev session. These are your individual sessions.
+1. Add an entry in `.env` with: `LMNR_DEV_SESSION_ID=01234567-89ab-cdef-0123-4567890ab`

-
+#### 4. Run the dev environment

-
+```sh
+lmnr dev
+```

-
+This will start a session, try to persist it, and reload the session on files change.

 ## CLI for code generation

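For context on the decorator registration documented above, here is a minimal sketch of a `pipeline.py`, assuming only the `Pipeline` class and `@lmnr.func` decorator shown in the README; the node names and function bodies are illustrative, not part of the package.

```python
# pipeline.py -- illustrative sketch only; node names ("verify_answer", "lookup")
# are placeholders and must match the function call nodes in your Laminar pipeline.
from lmnr import Pipeline

lmnr = Pipeline()


@lmnr.func("verify_answer")  # hypothetical node name
def verify_answer(answer: str, expected: str) -> str:
    # the return value is sent back to the pipeline as this node's output
    return "correct" if answer.strip() == expected.strip() else "incorrect"


@lmnr.func("lookup")  # hypothetical node name
def lookup(query: str) -> str:
    return f"no results found for {query}"
```

With a `.env` containing `LMNR_PROJECT_API_KEY` and `LMNR_DEV_SESSION_ID`, running `lmnr dev` from the project root should pick these functions up and reload them when the files change.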
{lmnr-0.2.13 → lmnr-0.2.14}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "lmnr"
-version = "0.2.13"
+version = "0.2.14"
 description = "Python SDK for Laminar AI"
 authors = [
     { name = "lmnr.ai", email = "founders@lmnr.ai" }
@@ -11,7 +11,7 @@ license = "Apache-2.0"

 [tool.poetry]
 name = "lmnr"
-version = "0.2.13"
+version = "0.2.14"
 description = "Python SDK for Laminar AI"
 authors = ["lmnr.ai"]
 readme = "README.md"
@@ -27,7 +27,8 @@ websockets = "^12.0"
 cookiecutter = "^2.6.0"
 python-dotenv = "^1.0.1"
 pystache = "^0.6.5"
-lmnr-baml = "^0.40.
+lmnr-baml = "^0.40.1"
+watchdog = "^4.0.2"

 [build-system]
 requires = ["poetry-core"]
@@ -40,4 +41,4 @@ lmnr = "lmnr.cli.cli:cli"
 lmnr = "lmnr.cli.cli:cli"

 [project.optional-dependencies]
-test = ["pytest"]
+test = ["pytest"]
{lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/cli/cli.py

@@ -1,7 +1,8 @@
 from pathlib import Path
 import sys
 import requests
-from dotenv import load_dotenv
+from dotenv import load_dotenv, find_dotenv
+import importlib
 import os
 import click
 import logging
@@ -11,6 +12,7 @@ from pydantic.alias_generators import to_pascal
 from lmnr.cli.zip import zip_directory
 from lmnr.sdk.registry import Registry as Pipeline
 from lmnr.sdk.remote_debugger import RemoteDebugger
+from lmnr.types import NodeFunction

 from .parser.parser import runnable_graph_to_template_vars

@@ -139,21 +141,7 @@ def deploy(endpoint_id, project_api_key):
     Path.unlink(zip_file_path, missing_ok=True)


-
-@click.option(
-    "-p",
-    "--project-api-key",
-    help="Project API key",
-)
-def dev(project_api_key):
-    project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
-    if not project_api_key:
-        load_dotenv()
-        project_api_key = os.environ.get("LMNR_PROJECT_API_KEY")
-    if not project_api_key:
-        raise ValueError("LMNR_PROJECT_API_KEY is not set")
-
-    cur_dir = os.getcwd()  # e.g. /Users/username/project_name
+def _load_functions(cur_dir: str) -> dict[str, NodeFunction]:
     parent_dir, name = os.path.split(cur_dir)  # e.g. /Users/username, project_name

     # Needed to __import__ pipeline.py
@@ -164,7 +152,13 @@ def dev(project_api_key):
     sys.path.insert(0, cur_dir)

     module_name = f"{name}.pipeline"
-
+    if module_name in sys.modules:
+        # Reload the module to get the updated version
+        importlib.reload(sys.modules[module_name])
+    else:
+        # Import the module for the first time
+        __import__(module_name)
+
     module = sys.modules[module_name]

     matches = [v for v in module.__dict__.values() if isinstance(v, Pipeline)]
@@ -174,6 +168,65 @@ def dev(project_api_key):
         raise ValueError("Multiple Pipelines found in the module")
     pipeline = matches[0]

-
-
-
+    return pipeline.functions
+
+from watchdog.observers import Observer
+from watchdog.events import PatternMatchingEventHandler
+import time
+
+class SimpleEventHandler(PatternMatchingEventHandler):
+    def __init__(self, project_api_key: str, session_id: str, functions: dict[str, NodeFunction]):
+        super().__init__(ignore_patterns=["*.pyc*", "*.pyo", "**/__pycache__"])
+        self.project_api_key = project_api_key
+        self.session_id = session_id
+        self.functions = functions
+        self.debugger = RemoteDebugger(project_api_key, session_id, functions)
+        self.debugger.start()
+
+    def on_any_event(self, event):
+        print(f"Files at {event.src_path} updated. Restarting debugger...")
+        self.debugger.stop()
+        self.functions = _load_functions(os.getcwd())
+        self.debugger = RemoteDebugger(self.project_api_key, self.session_id, self.functions)
+        self.debugger.start()
+
+@cli.command(name="dev")
+@click.option(
+    "-p",
+    "--project-api-key",
+    help="Project API key. If not provided, LMNR_PROJECT_API_KEY from os.environ or .env is used",
+)
+@click.option(
+    "-s",
+    "--dev-session-id",
+    help="Dev session ID. If not provided, LMNR_DEV_SESSION_ID from os.environ or .env is used",
+)
+def dev(project_api_key, dev_session_id):
+    env_path = find_dotenv(usecwd=True)
+    project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
+    if not project_api_key:
+        load_dotenv(env_path=env_path)
+        project_api_key = os.environ.get("LMNR_PROJECT_API_KEY")
+    if not project_api_key:
+        raise ValueError("LMNR_PROJECT_API_KEY is not set")
+
+    session_id = dev_session_id or os.environ.get("LMNR_DEV_SESSION_ID")
+    if not session_id:
+        load_dotenv(env_path=env_path)
+        session_id = os.environ.get("LMNR_DEV_SESSION_ID")
+    if not session_id:
+        raise ValueError("LMNR_DEV_SESSION_ID is not set")
+    cur_dir = os.getcwd()  # e.g. /Users/username/project_name
+    functions = _load_functions(cur_dir)
+
+    observer = Observer()
+    handler = SimpleEventHandler(project_api_key, session_id, functions)
+    observer.schedule(handler, cur_dir, recursive=True)
+    observer.start()
+    try:
+        while True:
+            time.sleep(1)
+    except KeyboardInterrupt:
+        handler.debugger.stop()
+        observer.stop()
+        observer.join()
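The new `dev` command above combines `_load_functions` with a watchdog observer so the debugger restarts whenever project files change. A stripped-down sketch of that watch-and-restart pattern, with a placeholder `restart()` standing in for the package's `RemoteDebugger` wiring:

```python
# Standalone sketch of the watchdog loop used by `lmnr dev`; restart() is a
# placeholder for what SimpleEventHandler.on_any_event does above (stop the
# debugger, re-import pipeline.py, start a fresh RemoteDebugger).
import time

from watchdog.events import PatternMatchingEventHandler
from watchdog.observers import Observer


def restart() -> None:
    print("reloading functions and restarting debugger...")


class ReloadHandler(PatternMatchingEventHandler):
    def __init__(self) -> None:
        # same ignore list as SimpleEventHandler: skip compiled files and caches
        super().__init__(ignore_patterns=["*.pyc*", "*.pyo", "**/__pycache__"])

    def on_any_event(self, event) -> None:
        print(f"Files at {event.src_path} updated. Restarting...")
        restart()


if __name__ == "__main__":
    observer = Observer()
    observer.schedule(ReloadHandler(), ".", recursive=True)
    observer.start()  # watchdog dispatches events from background threads
    try:
        while True:
            time.sleep(1)  # keep the main thread alive, as the dev command does
    except KeyboardInterrupt:
        observer.stop()
    observer.join()
```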
{lmnr-0.2.13 → lmnr-0.2.14}/src/lmnr/sdk/remote_debugger.py

@@ -12,15 +12,17 @@ from lmnr.types import (
     ToolCallRequest,
     ToolCallResponse,
 )
-import uuid
 import json
+from concurrent.futures import ThreadPoolExecutor
 from threading import Thread
+import time


 class RemoteDebugger:
     def __init__(
         self,
         project_api_key: str,
+        dev_session_id: str,
         tools: Union[dict[str, NodeFunction], list[Callable[..., NodeInput]]] = [],
     ):
         # for simplicity and backwards compatibility, we allow the user to pass a list
@@ -30,28 +32,32 @@ class RemoteDebugger:
         self.project_api_key = project_api_key
         self.url = "wss://api.lmnr.ai/v2/endpoint/ws"
         self.tools = tools
-        self.thread = Thread(target=self._run)
         self.stop_flag = False
-        self.session =
-
+        self.session = dev_session_id
+        self.executor = ThreadPoolExecutor(5)
+        self.running_tasks = {}  # dict[str, Future] from request_id to Future
+
     def start(self) -> Optional[str]:
         self.stop_flag = False
-        self.
-        self.thread.start()
+        self.executor.submit(self._run)
         return self.session

     def stop(self):
         self.stop_flag = True
-        self.
+        self.executor.shutdown()
         self.session = None
-        # python allows running threads only once, so we need to create
-        # a new thread
-        # in case the user wants to start the debugger again
-        self.thread = Thread(target=self._run)

-    def _run(self):
+    def _run(self, backoff=1):
         assert self.session is not None, "Session ID not set"
         request = RegisterDebuggerRequest(debuggerSessionId=self.session)
+        try:
+            self._connect_and_run(request, backoff)
+        except Exception as e:
+            print(f"Could not connect to server. Retrying in {backoff} seconds...")
+            time.sleep(backoff)
+            self._run(min(backoff * 2, 60))
+
+    def _connect_and_run(self, request: RegisterDebuggerRequest, backoff=1):
         with connect(
             self.url,
             additional_headers={"Authorization": f"Bearer {self.project_api_key}"},
@@ -61,15 +67,42 @@ class RemoteDebugger:
             req_id = None

             while not self.stop_flag:
+                # first check if any of the running tasks are done
+                done_tasks = []
+                for req_id, future in self.running_tasks.items():
+                    if not future.done():
+                        continue
+                    done_tasks.append(req_id)
+                    try:
+                        response = future.result()
+                    except Exception as e:
+                        error_message = (
+                            "Error occurred while running tool" + f"{tool.__name__}: {e}"
+                        )
+                        e = ToolCallError(error=error_message, reqId=req_id)
+                        websocket.send(e.model_dump_json())
+                        continue
+                    formatted_response = None
+                    try:
+                        formatted_response = ToolCallResponse(
+                            reqId=req_id, response=response
+                        )
+                    except pydantic.ValidationError:
+                        formatted_response = ToolCallResponse(
+                            reqId=req_id, response=str(response)
+                        )
+                    websocket.send(formatted_response.model_dump_json())
+                for req_id in done_tasks:
+                    del self.running_tasks[req_id]
                 try:
                     # blocks the thread until a message
-                    # is received or a timeout (
-                    message = websocket.recv(
+                    # is received or a timeout (0.1 seconds) occurs
+                    message = websocket.recv(0.1)
                 except TimeoutError:
                     continue
                 except websockets.exceptions.ConnectionClosedError:
-                    print("Connection
-
+                    print("Connection interrupted by server. Trying to reconnect...")
+                    self._run()
                 try:
                     tool_call = ToolCallRequest.model_validate_json(message)
                     req_id = tool_call.reqId
@@ -93,39 +126,18 @@ class RemoteDebugger:
                     arguments = json.loads(tool_call.toolCall.function.arguments)
                 except Exception:
                     pass
-
-                    response = tool(**arguments)
-                except Exception as e:
-                    error_message = (
-                        "Error occurred while running tool" + f"{tool.__name__}: {e}"
-                    )
-                    e = ToolCallError(error=error_message, reqId=req_id)
-                    websocket.send(e.model_dump_json())
-                    continue
-                formatted_response = None
-                try:
-                    formatted_response = ToolCallResponse(
-                        reqId=tool_call.reqId, response=response
-                    )
-                except pydantic.ValidationError:
-                    formatted_response = ToolCallResponse(
-                        reqId=tool_call.reqId, response=str(response)
-                    )
-                websocket.send(formatted_response.model_dump_json())
+                self.running_tasks[tool_call.reqId] = self.executor.submit(tool, **arguments)
             websocket.send(
                 DeregisterDebuggerRequest(
                     debuggerSessionId=self.session, deregister=True
                 ).model_dump_json()
             )

-    def _generate_session_id(self) -> str:
-        return uuid.uuid4().urn[9:]
-
     def _format_session_id_and_registerd_functions(self) -> str:
         registered_functions = ",\n".join(["- " + k for k in self.tools.keys()])
         return f"""
 ========================================
-
+Dev Session ID:
 {self.session}
 ========================================

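The debugger now runs tool calls through a `ThreadPoolExecutor` and polls `running_tasks` for finished futures instead of blocking on each call. A minimal, self-contained sketch of that submit-and-poll pattern (the request IDs and the dummy tool below are illustrative, not part of the package):

```python
# Submit-and-poll sketch mirroring running_tasks in RemoteDebugger: work is
# submitted to a thread pool, and the loop collects whichever futures are done
# without blocking on any single one.
import time
from concurrent.futures import Future, ThreadPoolExecutor


def slow_tool(seconds: float) -> str:
    time.sleep(seconds)
    return f"finished after {seconds}s"


executor = ThreadPoolExecutor(5)
running_tasks: dict[str, Future] = {}  # request id -> future

for req_id, delay in [("req-1", 0.5), ("req-2", 1.0), ("req-3", 0.2)]:
    running_tasks[req_id] = executor.submit(slow_tool, delay)

while running_tasks:
    done = [rid for rid, fut in running_tasks.items() if fut.done()]
    for rid in done:
        try:
            print(rid, "->", running_tasks[rid].result())
        except Exception as exc:  # a failing tool would be reported as a ToolCallError
            print(rid, "failed:", exc)
        del running_tasks[rid]
    time.sleep(0.1)  # comparable to the 0.1 s websocket.recv timeout in the loop above

executor.shutdown()
```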