lmnr 0.2.15__py3-none-any.whl → 0.3.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lmnr/__init__.py +4 -4
- lmnr/sdk/client.py +156 -0
- lmnr/sdk/collector.py +177 -0
- lmnr/sdk/constants.py +1 -0
- lmnr/sdk/context.py +456 -0
- lmnr/sdk/decorators.py +277 -0
- lmnr/sdk/interface.py +339 -0
- lmnr/sdk/providers/__init__.py +2 -0
- lmnr/sdk/providers/base.py +28 -0
- lmnr/sdk/providers/fallback.py +131 -0
- lmnr/sdk/providers/openai.py +140 -0
- lmnr/sdk/providers/utils.py +33 -0
- lmnr/sdk/tracing_types.py +197 -0
- lmnr/sdk/types.py +69 -0
- lmnr/sdk/utils.py +102 -0
- lmnr-0.3.0b1.dist-info/METADATA +186 -0
- lmnr-0.3.0b1.dist-info/RECORD +21 -0
- lmnr/cli/__init__.py +0 -0
- lmnr/cli/__main__.py +0 -4
- lmnr/cli/cli.py +0 -230
- lmnr/cli/parser/__init__.py +0 -0
- lmnr/cli/parser/nodes/__init__.py +0 -45
- lmnr/cli/parser/nodes/code.py +0 -36
- lmnr/cli/parser/nodes/condition.py +0 -30
- lmnr/cli/parser/nodes/input.py +0 -25
- lmnr/cli/parser/nodes/json_extractor.py +0 -29
- lmnr/cli/parser/nodes/llm.py +0 -56
- lmnr/cli/parser/nodes/output.py +0 -27
- lmnr/cli/parser/nodes/router.py +0 -37
- lmnr/cli/parser/nodes/semantic_search.py +0 -53
- lmnr/cli/parser/nodes/types.py +0 -153
- lmnr/cli/parser/parser.py +0 -62
- lmnr/cli/parser/utils.py +0 -49
- lmnr/cli/zip.py +0 -16
- lmnr/sdk/endpoint.py +0 -186
- lmnr/sdk/registry.py +0 -29
- lmnr/sdk/remote_debugger.py +0 -148
- lmnr/types.py +0 -101
- lmnr-0.2.15.dist-info/METADATA +0 -187
- lmnr-0.2.15.dist-info/RECORD +0 -28
- {lmnr-0.2.15.dist-info → lmnr-0.3.0b1.dist-info}/LICENSE +0 -0
- {lmnr-0.2.15.dist-info → lmnr-0.3.0b1.dist-info}/WHEEL +0 -0
- {lmnr-0.2.15.dist-info → lmnr-0.3.0b1.dist-info}/entry_points.txt +0 -0
lmnr-0.3.0b1.dist-info/METADATA
ADDED
@@ -0,0 +1,186 @@

```
Metadata-Version: 2.1
Name: lmnr
Version: 0.3.0b1
Summary: Python SDK for Laminar AI
License: Apache-2.0
Author: lmnr.ai
Requires-Python: >=3.9,<4.0
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Requires-Dist: backoff (>=2.2.1,<3.0.0)
Requires-Dist: black (>=24.4.2,<25.0.0)
Requires-Dist: openai (>=1.41.1,<2.0.0)
Requires-Dist: pydantic (>=2.7.4,<3.0.0)
Requires-Dist: python-dotenv (>=1.0.1,<2.0.0)
Requires-Dist: requests (>=2.32.3,<3.0.0)
Description-Content-Type: text/markdown
```

The markdown description follows:
# Laminar AI

This repo provides the core for code generation, the Laminar CLI, and the Laminar SDK.

## Quickstart

```sh
python3 -m venv .myenv
source .myenv/bin/activate  # or use your favorite env management tool

pip install lmnr
```

## Decorator instrumentation example

For easy automatic instrumentation, we provide two simple primitives:

- `observe` - a multi-purpose automatic decorator that starts traces and spans when functions are entered, and finishes them when functions return
- `wrap_llm_call` - a function that takes in your LLM call and returns a "decorated" version of it. This does all the same things as `observe`, plus a few utilities around LLM-specific things, such as counting tokens and recording model params.

You can also import `lmnr_context` to interact with, and have more control over, the context of the current span.

```python
import os
from openai import OpenAI

from lmnr import observe, wrap_llm_call, lmnr_context

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])


@observe()  # annotate all functions you want to trace
def poem_writer(topic="turbulence"):
    prompt = f"write a poem about {topic}"

    # wrap the actual final call to the LLM with `wrap_llm_call`
    response = wrap_llm_call(client.chat.completions.create)(
        model="gpt-4o",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": prompt},
        ],
    )

    poem = response.choices[0].message.content

    if topic in poem:
        lmnr_context.event("topic_alignment")  # send an event with a pre-defined name

    # to trigger an automatic check for a possible event, do:
    lmnr_context.check_span_event("excessive_wordiness")

    return poem


if __name__ == "__main__":
    print(poem_writer(topic="laminar flow"))
```

This gives the advantage of quick instrumentation, but it is somewhat limited in flexibility and does not work as expected with threading.
This is because we use `contextvars.ContextVar` under the hood, and Python does not automatically carry context variables over to new threads.
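For readers unfamiliar with this caveat, here is a minimal, self-contained sketch using only the standard library (plain `contextvars` and `threading`, not Laminar APIs): a value set in the main thread is invisible to a worker thread unless you copy the context explicitly.

```python
import contextvars
import threading

request_id = contextvars.ContextVar("request_id", default=None)

def worker():
    # CPython (3.13 and earlier) starts each thread with an empty context,
    # so the value set in the main thread is not visible here by default.
    print("worker sees:", request_id.get())

request_id.set("abc-123")
threading.Thread(target=worker).start()  # worker sees: None

# Explicitly copying the current context propagates the value.
ctx = contextvars.copy_context()
threading.Thread(target=lambda: ctx.run(worker)).start()  # worker sees: abc-123
```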
If you want to instrument your code manually, read on to the next section.

## Manual instrumentation example

For manual instrumentation you will need to import the following:
- `trace` - a function to start a trace. It returns a `TraceContext`.
- `TraceContext` - a pointer to the current trace that you can pass around functions as you want.
- `SpanContext` - a pointer to the current span that you can pass around functions as you want.

Both `TraceContext` and `SpanContext` expose the following interfaces:
- `span(name: str, **kwargs)` - create a child span within the current context. Returns `SpanContext`.
- `update(**kwargs)` - update the current trace or span and return it. Returns `TraceContext` or `SpanContext`. Useful when some metadata becomes known later during program execution.
- `end(**kwargs)` - update the current span, and terminate it.

In addition, `SpanContext` allows you to:
- `event(name: str, value: str | int = None)` - emit a custom event at any point.
- `evaluate_event(name: str, data: str)` - register a possible event for automatic checking by Laminar.

Example:

```python
import os
from openai import OpenAI

from lmnr import trace, TraceContext, SpanContext

client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])


def poem_writer(t: TraceContext, topic="turbulence"):
    span: SpanContext = t.span(name="poem_writer", input=None)

    prompt = f"write a poem about {topic}"
    messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": prompt},
    ]
    # create a child span within the current `poem_writer` span
    llm_span = span.span(name="OpenAI completion", input=messages, span_type="LLM")

    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Hello. What is the capital of France?"},
        ],
    )
    poem = response.choices[0].message.content
    if topic in poem:
        llm_span.event("topic_alignment")  # send an event with a pre-defined name

    # note that you can register possible events here as well, not only via `llm_span.check_span_event()`
    llm_span.end(output=poem, check_event_names=["excessive_wordiness"])
    span.end(output=poem)
    return poem


t: TraceContext = trace(user_id="user", session_id="session", release="release")
poem_writer(t, topic="laminar flow")
t.end(success=True)
```

## Features

- Make Laminar endpoint calls from your Python code
- Make Laminar endpoint calls that can run your own functions as tools
- CLI to generate code from pipelines you build on Laminar, or to execute your own functions while you test your flows in the workshop

## Making Laminar pipeline calls

When you are ready to use your pipeline in your code, deploy it in Laminar by selecting the target version for the pipeline.

Once your pipeline target is set, you can call it from Python in just a few lines.

Example use:

```python
from lmnr import Laminar

# for decorator instrumentation, do: `from lmnr import lmnr_context`

l = Laminar('<YOUR_PROJECT_API_KEY>')
result = l.run(  # use `lmnr_context.run(` for decorator instrumentation
    pipeline='my_pipeline_name',
    inputs={'input_node_name': 'some_value'},
    # all environment variables
    env={'OPENAI_API_KEY': 'sk-some-key'},
    # any metadata to attach to this run's trace
    metadata={'session_id': 'your_custom_session_id'},
)
```

Resulting in:

```python
>>> result
PipelineRunResponse(
    outputs={'output': {'value': [ChatMessage(role='user', content='hello')]}},
    # useful to locate your trace
    run_id='53b012d5-5759-48a6-a9c5-0011610e3669'
)
```

## PROJECT_API_KEY

Read more [here](https://docs.lmnr.ai/api-reference/introduction#authentication) on how to get a `PROJECT_API_KEY`.
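A minimal sketch of supplying the key from the environment rather than hardcoding it (the variable name `LMNR_PROJECT_API_KEY` is borrowed from this package's CLI; using it with the SDK constructor is an assumption for illustration):

```python
import os

from dotenv import load_dotenv  # python-dotenv is already a dependency of lmnr

from lmnr import Laminar

load_dotenv()  # assumption: LMNR_PROJECT_API_KEY is stored in a local .env file
l = Laminar(os.environ["LMNR_PROJECT_API_KEY"])
```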
lmnr-0.3.0b1.dist-info/RECORD
ADDED
@@ -0,0 +1,21 @@

```
lmnr/__init__.py,sha256=U3sQyxCHM9ojzfo05XYxM0T_Bh1StZFSp5K82NjATxc,242
lmnr/sdk/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
lmnr/sdk/client.py,sha256=6mp4sQF1IEESPYe6ABFgchMBQBKr2AT7eqP-mIC5cEA,5482
lmnr/sdk/collector.py,sha256=6LRmPhOcmGplUDWm_sJh0dVrLTHknd_kmq7POGuAvoQ,5338
lmnr/sdk/constants.py,sha256=USCfwuUqRx6_0xC8WUxqGj766dInqQkWJcf8U5vPK7s,34
lmnr/sdk/context.py,sha256=jfu2HGyZEJYSDf-LQAmmK8MKFnNhYfR66k_baQWx99s,15271
lmnr/sdk/decorators.py,sha256=B2wdhs45-ZM0niotZBOx5FSSCu_vGQ32pntG9o3fKKU,11860
lmnr/sdk/interface.py,sha256=BucPNopp_Xvb1Tvn6We4ETvqqQiWtwjbCksAtt4qmvU,13717
lmnr/sdk/providers/__init__.py,sha256=wNCgQnt9-bnTNXLQWdPgyKhqA1ajiaEd1Rr2KPOpazM,54
lmnr/sdk/providers/base.py,sha256=xc6iA8yY_VK6tbzswt-3naZ53aAXtOLur9j8eimC_ZA,1054
lmnr/sdk/providers/fallback.py,sha256=DXnxBX1vxusGSUC76d0AjouR4NSoajQMdMeG37TRf4k,4741
lmnr/sdk/providers/openai.py,sha256=EygnBniKlcic6eIOfS5zORpytLqUYZxnDRB5Z4MnXZY,5193
lmnr/sdk/providers/utils.py,sha256=ROt82VrvezExYOxionAynD3dp6oX5JoPW6F1ayTm7q8,946
lmnr/sdk/tracing_types.py,sha256=RvVb8yCLjCu9DT59OX_tvUxaOTCtE6fcsDH4nMddzHA,6399
lmnr/sdk/types.py,sha256=hVxOsa3oCQQ-8aS_WkOtErg4nHJRkBVySfYlTgDlDyk,2084
lmnr/sdk/utils.py,sha256=1yhXtdGmVXfnc8SOQiTH_zAZGbZrzO8oaFd7q5nE7eY,3349
lmnr-0.3.0b1.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
lmnr-0.3.0b1.dist-info/METADATA,sha256=U5UBpCkOSbDzsoQw4b4GkJxGKgMlxv8qoVRLzAVupiw,6700
lmnr-0.3.0b1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
lmnr-0.3.0b1.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
lmnr-0.3.0b1.dist-info/RECORD,,
```
lmnr/cli/__init__.py
DELETED
File without changes
lmnr/cli/__main__.py
DELETED
lmnr/cli/cli.py
DELETED
@@ -1,230 +0,0 @@

```python
from pathlib import Path
import sys
import requests
from dotenv import find_dotenv, get_key
import importlib
import os
import click
import logging
from cookiecutter.main import cookiecutter
from pydantic.alias_generators import to_pascal

from lmnr.cli.zip import zip_directory
from lmnr.sdk.registry import Registry as Pipeline
from lmnr.sdk.remote_debugger import RemoteDebugger
from lmnr.types import NodeFunction

from .parser.parser import runnable_graph_to_template_vars

from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import time

logger = logging.getLogger(__name__)


@click.group()
@click.version_option()
def cli():
    "CLI for Laminar AI Engine"


@cli.command(name="pull")
@click.argument("pipeline_name")
@click.argument("pipeline_version_name")
@click.option(
    "-p",
    "--project-api-key",
    help="Project API key",
)
@click.option(
    "-l",
    "--loglevel",
    help="Sets logging level",
)
def pull(pipeline_name, pipeline_version_name, project_api_key, loglevel):
    loglevel_str_to_val = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }
    logging.basicConfig()
    logging.getLogger().setLevel(loglevel_str_to_val.get(loglevel, logging.WARNING))

    project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
    if not project_api_key:
        load_dotenv()
        project_api_key = os.environ.get("LMNR_PROJECT_API_KEY")
    if not project_api_key:
        raise ValueError("LMNR_PROJECT_API_KEY is not set")

    headers = {"Authorization": f"Bearer {project_api_key}"}
    params = {
        "pipelineName": pipeline_name,
        "pipelineVersionName": pipeline_version_name,
    }
    res = requests.get(
        "https://api.lmnr.ai/v2/pipeline-version-by-name",
        headers=headers,
        params=params,
    )
    if res.status_code != 200:
        try:
            res_json = res.json()
        except Exception:
            raise ValueError(
                f"Error in fetching pipeline version: {res.status_code}\n{res.text}"
            )
        raise ValueError(
            f"Error in fetching pipeline version: {res.status_code}\n{res_json}"
        )

    pipeline_version = res.json()

    class_name = to_pascal(pipeline_name.replace(" ", "_").replace("-", "_"))

    context = {
        "pipeline_name": pipeline_name,
        "pipeline_version_name": pipeline_version_name,
        "class_name": class_name,
        # _tasks starts from underscore because we don't want it to be templated
        # some tasks contain LLM nodes which have prompts
        # which we don't want to be rendered by cookiecutter
        "_tasks": runnable_graph_to_template_vars(pipeline_version["runnableGraph"]),
    }

    logger.info(f"Context:\n{context}")
    cookiecutter(
        "https://github.com/lmnr-ai/lmnr-python-engine.git",
        output_dir=".",
        config_file=None,
        extra_context=context,
        no_input=True,
        overwrite_if_exists=True,
    )


@cli.command(name="deploy")
@click.argument("endpoint_id")
@click.option(
    "-p",
    "--project-api-key",
    help="Project API key",
)
def deploy(endpoint_id, project_api_key):
    project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
    if not project_api_key:
        load_dotenv()
        project_api_key = os.environ.get("LMNR_PROJECT_API_KEY")
    if not project_api_key:
        raise ValueError("LMNR_PROJECT_API_KEY is not set")

    current_directory = Path.cwd()
    zip_file_path = current_directory / "archive.zip"

    zip_directory(current_directory, zip_file_path)

    try:
        url = f"https://api.lmnr.ai/v2/endpoints/{endpoint_id}/deploy-code"
        with open(zip_file_path, "rb") as f:
            headers = {
                "Authorization": f"Bearer {project_api_key}",
            }
            files = {"file": f}
            response = requests.post(url, headers=headers, files=files)

        if response.status_code != 200:
            raise ValueError(
                f"Error in deploying code: {response.status_code}\n{response.text}"
            )
    except Exception:
        logging.exception("Error in deploying code")
    finally:
        Path.unlink(zip_file_path, missing_ok=True)


def _load_functions(cur_dir: str) -> dict[str, NodeFunction]:
    parent_dir, name = os.path.split(cur_dir)  # e.g. /Users/username, project_name

    # Needed to __import__ pipeline.py
    if sys.path[0] != parent_dir:
        sys.path.insert(0, parent_dir)
    # Needed to import src in pipeline.py and other files
    if cur_dir not in sys.path:
        sys.path.insert(0, cur_dir)

    module_name = f"{name}.pipeline"
    if module_name in sys.modules:
        # Reload the module to get the updated version
        importlib.reload(sys.modules[module_name])
    else:
        # Import the module for the first time
        __import__(module_name)

    module = sys.modules[module_name]

    matches = [v for v in module.__dict__.values() if isinstance(v, Pipeline)]
    if not matches:
        raise ValueError("No Pipeline found in the module")
    if len(matches) > 1:
        raise ValueError("Multiple Pipelines found in the module")
    pipeline = matches[0]

    return pipeline.functions


class SimpleEventHandler(PatternMatchingEventHandler):
    def __init__(self, project_api_key: str, session_id: str, functions: dict[str, NodeFunction]):
        super().__init__(ignore_patterns=["*.pyc*", "*.pyo", "**/__pycache__"])
        self.project_api_key = project_api_key
        self.session_id = session_id
        self.functions = functions
        self.debugger = RemoteDebugger(project_api_key, session_id, functions)
        self.debugger.start()

    def on_any_event(self, event):
        print(f"Files at {event.src_path} updated. Restarting debugger...")
        self.debugger.stop()
        self.functions = _load_functions(os.getcwd())
        self.debugger = RemoteDebugger(self.project_api_key, self.session_id, self.functions)
        self.debugger.start()


@cli.command(name="dev")
@click.option(
    "-p",
    "--project-api-key",
    help="Project API key. If not provided, LMNR_PROJECT_API_KEY from os.environ or .env is used",
)
@click.option(
    "-s",
    "--dev-session-id",
    help="Dev session ID. If not provided, LMNR_DEV_SESSION_ID from os.environ or .env is used",
)
def dev(project_api_key, dev_session_id):
    cur_dir = os.getcwd()  # e.g. /Users/username/project_name
    env_path = find_dotenv(usecwd=True)
    project_api_key = project_api_key or os.environ.get("LMNR_PROJECT_API_KEY")
    if not project_api_key:
        project_api_key = get_key(env_path, "LMNR_PROJECT_API_KEY")
    if not project_api_key:
        raise ValueError("LMNR_PROJECT_API_KEY is not set")

    session_id = dev_session_id or os.environ.get("LMNR_DEV_SESSION_ID")
    if not session_id:
        session_id = get_key(env_path, "LMNR_DEV_SESSION_ID")
    if not session_id:
        raise ValueError("LMNR_DEV_SESSION_ID is not set")
    functions = _load_functions(cur_dir)

    observer = Observer()
    handler = SimpleEventHandler(project_api_key, session_id, functions)
    observer.schedule(handler, cur_dir, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        handler.debugger.stop()
        observer.stop()
        observer.join()
```
lmnr/cli/parser/__init__.py
DELETED
File without changes
lmnr/cli/parser/nodes/__init__.py
DELETED
@@ -1,45 +0,0 @@

```python
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from typing import Optional
import uuid


@dataclass
class Handle:
    id: uuid.UUID
    name: Optional[str]

    @classmethod
    def from_dict(cls, dict: dict) -> "Handle":
        return cls(
            id=uuid.UUID(dict["id"]),
            name=(dict["name"] if "name" in dict else None),
        )


@abstractmethod
class NodeFunctions(metaclass=ABCMeta):
    @abstractmethod
    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        """
        Returns a list of tuples mapping from this node's input
        handle name to the unique name of the previous node.

        Assumes previous node has only one output.
        """
        pass

    @abstractmethod
    def node_type(self) -> str:
        pass

    @abstractmethod
    def config(self) -> dict:
        """
        Returns a dictionary of node-specific configuration.

        E.g. prompt and model name for LLM node.
        """
        pass
```
lmnr/cli/parser/nodes/code.py
DELETED
@@ -1,36 +0,0 @@

```python
from dataclasses import dataclass
import uuid

from lmnr.cli.parser.nodes import Handle, NodeFunctions
from lmnr.cli.parser.utils import map_handles


@dataclass
class CodeNode(NodeFunctions):
    id: uuid.UUID
    name: str
    inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]
    code: str
    fn_name: str

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        return map_handles(
            self.inputs, self.inputs_mappings, output_handle_id_to_node_name
        )

    def node_type(self) -> str:
        return "Code"

    def config(self) -> dict:
        return {
            "code": self.code,
            "fn_name": self.fn_name,
            "fn_inputs": ", ".join(
                f"{handle.name}=input_to_code_node_arg({handle.name})"
                for handle in self.inputs
            ),
        }
```
lmnr/cli/parser/nodes/condition.py
DELETED
@@ -1,30 +0,0 @@

```python
from dataclasses import dataclass
import uuid

from lmnr.cli.parser.nodes import Handle, NodeFunctions
from lmnr.cli.parser.utils import map_handles


@dataclass
class ConditionNode(NodeFunctions):
    id: uuid.UUID
    name: str
    inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]
    condition: str

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        return map_handles(
            self.inputs, self.inputs_mappings, output_handle_id_to_node_name
        )

    def node_type(self) -> str:
        return "Condition"

    def config(self) -> dict:
        return {
            "condition": self.condition,
        }
```
lmnr/cli/parser/nodes/input.py
DELETED
@@ -1,25 +0,0 @@

```python
from dataclasses import dataclass
from typing import Optional
import uuid

from lmnr.cli.parser.nodes import Handle, NodeFunctions
from lmnr.types import NodeInput


@dataclass
class InputNode(NodeFunctions):
    id: uuid.UUID
    name: str
    outputs: list[Handle]
    input: Optional[NodeInput]

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        return []

    def node_type(self) -> str:
        return "Input"

    def config(self) -> dict:
        return {}
```
lmnr/cli/parser/nodes/json_extractor.py
DELETED
@@ -1,29 +0,0 @@

```python
from dataclasses import dataclass

import uuid

from lmnr.cli.parser.nodes import Handle, NodeFunctions
from lmnr.cli.parser.utils import map_handles


@dataclass
class JsonExtractorNode(NodeFunctions):
    id: uuid.UUID
    name: str
    inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]
    template: str

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        return map_handles(
            self.inputs, self.inputs_mappings, output_handle_id_to_node_name
        )

    def node_type(self) -> str:
        return "JsonExtractor"

    def config(self) -> dict:
        return {"template": self.template}
```
lmnr/cli/parser/nodes/llm.py
DELETED
@@ -1,56 +0,0 @@

```python
from dataclasses import dataclass
from typing import Optional
import uuid

from lmnr.cli.parser.nodes import Handle, NodeFunctions
from lmnr.cli.parser.utils import map_handles


@dataclass
class LLMNode(NodeFunctions):
    id: uuid.UUID
    name: str
    inputs: list[Handle]
    dynamic_inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]
    prompt: str
    model: str
    model_params: Optional[str]
    stream: bool
    structured_output_enabled: bool
    structured_output_max_retries: int
    structured_output_schema: Optional[str]
    structured_output_schema_target: Optional[str]

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        combined_inputs = self.inputs + self.dynamic_inputs
        return map_handles(
            combined_inputs, self.inputs_mappings, output_handle_id_to_node_name
        )

    def node_type(self) -> str:
        return "LLM"

    def config(self) -> dict:
        # For easier access in the template separate the provider and model here
        provider, model = self.model.split(":", maxsplit=1)

        return {
            "prompt": self.prompt,
            "provider": provider,
            "model": model,
            "model_params": self.model_params,
            "stream": self.stream,
            "enable_structured_output": self.structured_output_enabled
            and self.structured_output_schema is not None,
            "structured_output_max_retries": self.structured_output_max_retries,
            "structured_output_schema": self.structured_output_schema,
            "structured_output_schema_target_str": (
                "None"
                if self.structured_output_schema_target is None
                else f'"{self.structured_output_schema_target}"'
            ),
        }
```