lmnr 0.2.6__py3-none-any.whl → 0.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lmnr/cli/parser/nodes/code.py +27 -0
- lmnr/cli/parser/nodes/condition.py +30 -0
- lmnr/cli/parser/nodes/router.py +37 -0
- lmnr/cli/parser/nodes/types.py +48 -1
- lmnr/cli/parser/parser.py +4 -4
- lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/engine.py +36 -5
- lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/nodes/functions.py +44 -8
- lmnr/types.py +8 -3
- {lmnr-0.2.6.dist-info → lmnr-0.2.7.dist-info}/METADATA +2 -2
- {lmnr-0.2.6.dist-info → lmnr-0.2.7.dist-info}/RECORD +13 -10
- {lmnr-0.2.6.dist-info → lmnr-0.2.7.dist-info}/LICENSE +0 -0
- {lmnr-0.2.6.dist-info → lmnr-0.2.7.dist-info}/WHEEL +0 -0
- {lmnr-0.2.6.dist-info → lmnr-0.2.7.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,27 @@
|
|
1
|
+
from dataclasses import dataclass
|
2
|
+
import uuid
|
3
|
+
|
4
|
+
from lmnr.cli.parser.nodes import Handle, NodeFunctions
|
5
|
+
from lmnr.cli.parser.utils import map_handles
|
6
|
+
|
7
|
+
|
8
|
+
@dataclass
class CodeNode(NodeFunctions):
    """Parsed representation of a user-defined Code node in a pipeline graph."""

    id: uuid.UUID
    name: str
    inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        """Resolve input handles to (handle_name, source_node_name) pairs."""
        return map_handles(
            self.inputs,
            self.inputs_mappings,
            output_handle_id_to_node_name,
        )

    def node_type(self) -> str:
        """Discriminator string used by the code generator."""
        return "Code"

    def config(self) -> dict:
        """A Code node carries no extra configuration."""
        return {}
|
@@ -0,0 +1,30 @@
|
|
1
|
+
from dataclasses import dataclass
|
2
|
+
import uuid
|
3
|
+
|
4
|
+
from lmnr.cli.parser.nodes import Handle, NodeFunctions
|
5
|
+
from lmnr.cli.parser.utils import map_handles
|
6
|
+
|
7
|
+
|
8
|
+
@dataclass
class ConditionNode(NodeFunctions):
    """Parsed representation of a Condition node in a pipeline graph.

    Passes its input through only when the incoming value's condition
    matches the configured ``condition`` string.
    """

    id: uuid.UUID
    name: str
    inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]
    condition: str

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        """Resolve input handles to (handle_name, source_node_name) pairs."""
        return map_handles(
            self.inputs,
            self.inputs_mappings,
            output_handle_id_to_node_name,
        )

    def node_type(self) -> str:
        """Discriminator string used by the code generator."""
        return "Condition"

    def config(self) -> dict:
        """Expose the condition string to the code-generation templates."""
        return {"condition": self.condition}
|
@@ -0,0 +1,37 @@
|
|
1
|
+
from dataclasses import dataclass
|
2
|
+
import uuid
|
3
|
+
|
4
|
+
from lmnr.cli.parser.nodes import Handle, NodeFunctions
|
5
|
+
from lmnr.cli.parser.utils import map_handles
|
6
|
+
|
7
|
+
|
8
|
+
@dataclass
class Route:
    """A single named route of a Router node."""

    name: str


@dataclass
class RouterNode(NodeFunctions):
    """Parsed representation of a Router node in a pipeline graph.

    Dispatches its input down one of ``routes`` based on a condition
    value; may fall back to a default route when configured.
    """

    id: uuid.UUID
    name: str
    inputs: list[Handle]
    outputs: list[Handle]
    inputs_mappings: dict[uuid.UUID, uuid.UUID]
    routes: list[Route]
    has_default_route: bool

    def handles_mapping(
        self, output_handle_id_to_node_name: dict[str, str]
    ) -> list[tuple[str, str]]:
        """Resolve input handles to (handle_name, source_node_name) pairs."""
        return map_handles(
            self.inputs,
            self.inputs_mappings,
            output_handle_id_to_node_name,
        )

    def node_type(self) -> str:
        """Discriminator string used by the code generator."""
        return "Router"

    def config(self) -> dict:
        """Expose route names and the default-route flag as template strings."""
        route_names = [route.name for route in self.routes]
        return {
            "routes": str(route_names),
            "has_default_route": str(self.has_default_route),
        }
|
lmnr/cli/parser/nodes/types.py
CHANGED
@@ -2,9 +2,12 @@ from typing import Any, Union
|
|
2
2
|
import uuid
|
3
3
|
|
4
4
|
from lmnr.cli.parser.nodes import Handle
|
5
|
+
from lmnr.cli.parser.nodes.code import CodeNode
|
6
|
+
from lmnr.cli.parser.nodes.condition import ConditionNode
|
5
7
|
from lmnr.cli.parser.nodes.input import InputNode
|
6
8
|
from lmnr.cli.parser.nodes.llm import LLMNode
|
7
9
|
from lmnr.cli.parser.nodes.output import OutputNode
|
10
|
+
from lmnr.cli.parser.nodes.router import Route, RouterNode
|
8
11
|
from lmnr.cli.parser.nodes.semantic_search import (
|
9
12
|
SemanticSearchDatasource,
|
10
13
|
SemanticSearchNode,
|
@@ -21,7 +24,15 @@ def node_input_from_json(json_val: Any) -> NodeInput:
|
|
21
24
|
raise ValueError(f"Invalid NodeInput value: {json_val}")
|
22
25
|
|
23
26
|
|
24
|
-
Node = Union[
|
27
|
+
Node = Union[
|
28
|
+
InputNode,
|
29
|
+
OutputNode,
|
30
|
+
ConditionNode,
|
31
|
+
LLMNode,
|
32
|
+
RouterNode,
|
33
|
+
SemanticSearchNode,
|
34
|
+
CodeNode,
|
35
|
+
]
|
25
36
|
|
26
37
|
|
27
38
|
def node_from_dict(node_dict: dict) -> Node:
|
@@ -44,6 +55,18 @@ def node_from_dict(node_dict: dict) -> Node:
|
|
44
55
|
for k, v in node_dict["inputsMappings"].items()
|
45
56
|
},
|
46
57
|
)
|
58
|
+
elif node_dict["type"] == "Condition":
|
59
|
+
return ConditionNode(
|
60
|
+
id=uuid.UUID(node_dict["id"]),
|
61
|
+
name=node_dict["name"],
|
62
|
+
inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
|
63
|
+
outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
|
64
|
+
inputs_mappings={
|
65
|
+
uuid.UUID(k): uuid.UUID(v)
|
66
|
+
for k, v in node_dict["inputsMappings"].items()
|
67
|
+
},
|
68
|
+
condition=node_dict["condition"],
|
69
|
+
)
|
47
70
|
elif node_dict["type"] == "LLM":
|
48
71
|
return LLMNode(
|
49
72
|
id=uuid.UUID(node_dict["id"]),
|
@@ -69,6 +92,19 @@ def node_from_dict(node_dict: dict) -> Node:
|
|
69
92
|
structured_output_schema=None,
|
70
93
|
structured_output_schema_target=None,
|
71
94
|
)
|
95
|
+
elif node_dict["type"] == "Router":
|
96
|
+
return RouterNode(
|
97
|
+
id=uuid.UUID(node_dict["id"]),
|
98
|
+
name=node_dict["name"],
|
99
|
+
inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
|
100
|
+
outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
|
101
|
+
inputs_mappings={
|
102
|
+
uuid.UUID(k): uuid.UUID(v)
|
103
|
+
for k, v in node_dict["inputsMappings"].items()
|
104
|
+
},
|
105
|
+
routes=[Route(name=route["name"]) for route in node_dict["routes"]],
|
106
|
+
has_default_route=node_dict["hasDefaultRoute"],
|
107
|
+
)
|
72
108
|
elif node_dict["type"] == "SemanticSearch":
|
73
109
|
return SemanticSearchNode(
|
74
110
|
id=uuid.UUID(node_dict["id"]),
|
@@ -87,5 +123,16 @@ def node_from_dict(node_dict: dict) -> Node:
|
|
87
123
|
for ds in node_dict["datasources"]
|
88
124
|
],
|
89
125
|
)
|
126
|
+
elif node_dict["type"] == "Code":
|
127
|
+
return CodeNode(
|
128
|
+
id=uuid.UUID(node_dict["id"]),
|
129
|
+
name=node_dict["name"],
|
130
|
+
inputs=[Handle.from_dict(handle) for handle in node_dict["inputs"]],
|
131
|
+
outputs=[Handle.from_dict(handle) for handle in node_dict["outputs"]],
|
132
|
+
inputs_mappings={
|
133
|
+
uuid.UUID(k): uuid.UUID(v)
|
134
|
+
for k, v in node_dict["inputsMappings"].items()
|
135
|
+
},
|
136
|
+
)
|
90
137
|
else:
|
91
138
|
raise ValueError(f"Node type {node_dict['type']} not supported")
|
lmnr/cli/parser/parser.py
CHANGED
@@ -17,6 +17,9 @@ def runnable_graph_to_template_vars(graph: dict) -> dict:
|
|
17
17
|
node = node_from_dict(node_obj)
|
18
18
|
handles_mapping = node.handles_mapping(output_handle_id_to_node_name)
|
19
19
|
node_type = node.node_type()
|
20
|
+
|
21
|
+
unique_handles = set([handle_name for (handle_name, _) in handles_mapping])
|
22
|
+
|
20
23
|
tasks.append(
|
21
24
|
{
|
22
25
|
"name": node.name,
|
@@ -28,10 +31,7 @@ def runnable_graph_to_template_vars(graph: dict) -> dict:
|
|
28
31
|
handle_name for (handle_name, _) in handles_mapping
|
29
32
|
],
|
30
33
|
"handle_args": ", ".join(
|
31
|
-
[
|
32
|
-
f"{handle_name}: NodeInput"
|
33
|
-
for (handle_name, _) in handles_mapping
|
34
|
-
]
|
34
|
+
[f"{handle_name}: NodeInput" for handle_name in unique_handles]
|
35
35
|
),
|
36
36
|
"prev": [],
|
37
37
|
"next": [],
|
@@ -170,13 +170,17 @@ class Engine:
|
|
170
170
|
active_tasks.remove(task_id)
|
171
171
|
|
172
172
|
if depth > 0:
|
173
|
-
|
174
|
-
# TODO: Implement this for cycles
|
175
|
-
raise NotImplementedError()
|
173
|
+
self.propagate_reset(task_id, task_id, tasks)
|
176
174
|
|
175
|
+
# terminate graph on recursion depth exceeding 10
|
177
176
|
if depth == 10:
|
178
|
-
|
179
|
-
|
177
|
+
logging.error("Max recursion depth exceeded, terminating graph")
|
178
|
+
error = Message(
|
179
|
+
id=uuid.uuid4(),
|
180
|
+
value="Max recursion depth exceeded",
|
181
|
+
start_time=start_time,
|
182
|
+
end_time=datetime.datetime.now(),
|
183
|
+
)
|
180
184
|
|
181
185
|
if not next:
|
182
186
|
# if there are no next tasks, we can terminate the graph
|
@@ -260,3 +264,30 @@ class Engine:
|
|
260
264
|
task,
|
261
265
|
queue,
|
262
266
|
)
|
267
|
+
|
268
|
+
    def propagate_reset(
        self, current_task_name: str, start_task_name: str, tasks: dict[str, Task]
    ) -> None:
        """Reset downstream task input states along a cycle.

        Starting from ``current_task_name``, walks each successor and clears
        any input state that was produced by the current task, recursing
        until the walk comes back around to ``start_task_name``. Called from
        the run loop when a task re-executes at depth > 0 (i.e. inside a
        cycle), so stale successes from the previous iteration are emptied.

        :param current_task_name: task whose successors should be reset
        :param start_task_name: task that started the cycle; recursion stops
            when a successor equals this name
        :param tasks: all tasks in the graph, keyed by name
        """
        task = tasks[current_task_name]

        for next_task_name in task.next:
            # NOTE(review): `return` aborts the remaining siblings in
            # `task.next` as soon as the cycle start is reached — confirm
            # this is intended rather than `continue`.
            if next_task_name == start_task_name:
                return

            next_task = tasks[next_task_name]

            # in majority of cases there will be only one handle name
            # however we need to handle the case when single output is mapped
            # to multiple inputs on the next node
            handle_names = []
            # handles_mapping yields (handle_name, source_node_name) pairs;
            # collect the handles fed by the current task.
            for k, v in next_task.handles_mapping:
                if v == task.name:
                    handle_names.append(k)

            for handle_name in handle_names:
                next_state = next_task.input_states[handle_name]

                # Only successfully-completed states are cleared; the
                # semaphore release presumably wakes the waiting consumer —
                # TODO confirm against the state/semaphore implementation.
                if next_state.get_state().is_success():
                    next_state.set_state(State.empty())
                    next_state.semaphore.release()
            # Recurse even when nothing was cleared for this successor, so
            # the reset reaches transitively-downstream tasks.
            self.propagate_reset(next_task_name, start_task_name, tasks)
@@ -1,6 +1,7 @@
|
|
1
1
|
import requests
|
2
2
|
import json
|
3
3
|
|
4
|
+
from lmnr.types import ConditionedValue
|
4
5
|
from lmnr_engine.engine.action import NodeRunError, RunOutput
|
5
6
|
from lmnr_engine.types import ChatMessage, NodeInput
|
6
7
|
|
@@ -24,8 +25,8 @@ def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunO
|
|
24
25
|
rendered_prompt = """{{task.config.prompt}}"""
|
25
26
|
{% set prompt_variables = task.input_handle_names|reject("equalto", "chat_messages") %}
|
26
27
|
{% for prompt_variable in prompt_variables %}
|
27
|
-
# TODO: Fix this. Using double curly braces in quotes because normal double curly braces
|
28
|
-
# get replaced during rendering by Cookiecutter. This is a hacky solution
|
28
|
+
{# TODO: Fix this. Using double curly braces in quotes because normal double curly braces
|
29
|
+
# get replaced during rendering by Cookiecutter. This is a hacky solution.#}
|
29
30
|
rendered_prompt = rendered_prompt.replace("{{'{{'}}{{prompt_variable}}{{'}}'}}", {{prompt_variable}}) # type: ignore
|
30
31
|
{% endfor %}
|
31
32
|
|
@@ -68,7 +69,8 @@ def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunO
|
|
68
69
|
completion_message = chat_completion["choices"][0]["message"]["content"]
|
69
70
|
|
70
71
|
meta_log = {}
|
71
|
-
|
72
|
+
{# TODO: Add node chunk id #}
|
73
|
+
meta_log["node_chunk_id"] = None
|
72
74
|
meta_log["model"] = "{{task.config.model}}"
|
73
75
|
meta_log["prompt"] = rendered_prompt
|
74
76
|
meta_log["input_message_count"] = len(messages)
|
@@ -77,7 +79,8 @@ def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunO
|
|
77
79
|
meta_log["total_token_count"] = (
|
78
80
|
chat_completion["usage"]["prompt_tokens"] + chat_completion["usage"]["completion_tokens"]
|
79
81
|
)
|
80
|
-
|
82
|
+
{# TODO: Add approximate cost #}
|
83
|
+
meta_log["approximate_cost"] = None
|
81
84
|
{% elif task.config.provider == "anthropic" %}
|
82
85
|
data = {
|
83
86
|
"model": "{{task.config.model}}",
|
@@ -85,7 +88,7 @@ def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunO
|
|
85
88
|
}
|
86
89
|
data.update(params)
|
87
90
|
|
88
|
-
# TODO: Generate appropriate code based on this if-else block
|
91
|
+
{# TODO: Generate appropriate code based on this if-else block #}
|
89
92
|
if len(messages) == 1 and messages[0].role == "system":
|
90
93
|
messages[0].role = "user"
|
91
94
|
message_jsons = [
|
@@ -116,7 +119,8 @@ def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunO
|
|
116
119
|
completion_message = chat_completion["content"][0]["text"]
|
117
120
|
|
118
121
|
meta_log = {}
|
119
|
-
|
122
|
+
{# TODO: Add node chunk id#}
|
123
|
+
meta_log["node_chunk_id"] = None
|
120
124
|
meta_log["model"] = "{{task.config.model}}"
|
121
125
|
meta_log["prompt"] = rendered_prompt
|
122
126
|
meta_log["input_message_count"] = len(messages)
|
@@ -125,7 +129,8 @@ def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunO
|
|
125
129
|
meta_log["total_token_count"] = (
|
126
130
|
chat_completion["usage"]["input_tokens"] + chat_completion["usage"]["output_tokens"]
|
127
131
|
)
|
128
|
-
|
132
|
+
{# TODO: Add approximate cost#}
|
133
|
+
meta_log["approximate_cost"] = None
|
129
134
|
{% else %}
|
130
135
|
{% endif %}
|
131
136
|
|
@@ -150,7 +155,7 @@ def {{task.function_name}}(query: NodeInput, _env: dict[str, str]) -> RunOutput:
|
|
150
155
|
}
|
151
156
|
query_res = requests.post("https://api.lmnr.ai/v2/semantic-search", headers=headers, json=data)
|
152
157
|
if query_res.status_code != 200:
|
153
|
-
raise NodeRunError(f"Vector search request failed:
|
158
|
+
raise NodeRunError(f"Vector search request failed:{query_res.status_code}\n{query_res.text}")
|
154
159
|
|
155
160
|
results = query_res.json()
|
156
161
|
|
@@ -168,6 +173,37 @@ def {{task.function_name}}(query: NodeInput, _env: dict[str, str]) -> RunOutput:
|
|
168
173
|
return RunOutput(status="Success", output=output)
|
169
174
|
|
170
175
|
|
176
|
+
{% elif task.node_type == "Router" %}
{# Router node: pick the route whose name equals the incoming condition and
   wrap the input in a ConditionedValue so downstream Condition nodes can
   match on it. #}
def {{task.function_name}}(condition: NodeInput, input: NodeInput, _env: dict[str, str]) -> RunOutput:
    routes = {{ task.config.routes }}
    has_default_route = {{ task.config.has_default_route }}

    for route in routes:
        if route == condition:
            return RunOutput(status="Success", output=ConditionedValue(condition=route, value=input))

    {# The last configured route doubles as the default route. #}
    if has_default_route:
        return RunOutput(status="Success", output=ConditionedValue(condition=routes[-1], value=input))

    raise NodeRunError(f"No route found for condition {condition}")


{% elif task.node_type == "Condition" %}
{# Condition node: unwrap a ConditionedValue when its condition matches this
   node's configured condition; otherwise terminate this branch of the graph. #}
def {{task.function_name}}(input: NodeInput, _env: dict[str, str]) -> RunOutput:
    condition = "{{task.config.condition}}"

    if input.condition == condition:
        return RunOutput(status="Success", output=input.value)
    else:
        return RunOutput(status="Termination", output=None)


{% elif task.node_type == "Code" %}
{# Code node: generated as a stub — the user fills in the implementation. #}
def {{task.function_name}}({{ task.handle_args }}, _env: dict[str, str]) -> RunOutput:
    # Implement any functionality you want here
    raise NodeRunError("Implement your code here")
|
205
|
+
|
206
|
+
|
171
207
|
{% elif task.node_type == "Output" %}
|
172
208
|
def {{task.function_name}}(output: NodeInput, _env: dict[str, str]) -> RunOutput:
|
173
209
|
return RunOutput(status="Success", output=output)
|
lmnr/types.py
CHANGED
@@ -9,7 +9,12 @@ class ChatMessage(pydantic.BaseModel):
|
|
9
9
|
content: str
|
10
10
|
|
11
11
|
|
12
|
-
|
12
|
+
class ConditionedValue(pydantic.BaseModel):
    # Wrapper produced by a Router node: the name of the matched route plus
    # the value being forwarded to the next node.
    condition: str
    # Forward reference — NodeInput is defined right below this class.
    # NOTE(review): confirm pydantic resolves this forward ref (e.g. via
    # update_forward_refs) before the model is first instantiated.
    value: "NodeInput"


# Every shape of value that can flow between pipeline nodes.
NodeInput = Union[str, list[ChatMessage], ConditionedValue]  # TypeAlias
|
13
18
|
|
14
19
|
|
15
20
|
class EndpointRunRequest(pydantic.BaseModel):
|
@@ -51,7 +56,7 @@ class SDKError(Exception):
|
|
51
56
|
super().__init__(error_message)
|
52
57
|
|
53
58
|
|
54
|
-
class
|
59
|
+
class ToolCallFunction(pydantic.BaseModel):
|
55
60
|
name: str
|
56
61
|
arguments: str
|
57
62
|
|
@@ -59,7 +64,7 @@ class ToolCallRequest(pydantic.BaseModel):
|
|
59
64
|
class ToolCall(pydantic.BaseModel):
|
60
65
|
id: Optional[str]
|
61
66
|
type: Optional[str]
|
62
|
-
function:
|
67
|
+
function: ToolCallFunction
|
63
68
|
|
64
69
|
|
65
70
|
# TODO: allow snake_case and manually convert to camelCase
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: lmnr
|
3
|
-
Version: 0.2.
|
3
|
+
Version: 0.2.7
|
4
4
|
Summary: Python SDK for Laminar AI
|
5
5
|
License: Apache-2.0
|
6
6
|
Author: lmnr.ai
|
@@ -22,7 +22,7 @@ Description-Content-Type: text/markdown
|
|
22
22
|
|
23
23
|
# Laminar AI
|
24
24
|
|
25
|
-
This
|
25
|
+
This repo provides core for code generation, Laminar CLI, and Laminar SDK.
|
26
26
|
|
27
27
|
## Quickstart
|
28
28
|
```sh
|
@@ -5,28 +5,31 @@ lmnr/cli/cli.py,sha256=pzr5LUILi7TcaJIkC-CzmT7RG7-HWApQmUpgK9bc7mI,2847
|
|
5
5
|
lmnr/cli/cookiecutter.json,sha256=PeiMMzCPzDhsapqYoAceYGPI5lOUNimvFzh5KeQv5QE,359
|
6
6
|
lmnr/cli/parser/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
7
7
|
lmnr/cli/parser/nodes/__init__.py,sha256=BNbbfn0WwbFDA6TNhLOaT_Ji69rCL5voUibqMD7Knng,1163
|
8
|
+
lmnr/cli/parser/nodes/code.py,sha256=GXqOxN6tdiStZGWLbN3WZCmDfzwYIgSRmZ5t72AOIXc,661
|
9
|
+
lmnr/cli/parser/nodes/condition.py,sha256=AJny0ILXbSy1hTwsRvZvDUqts9INNx63yQSkD7Dp7KU,740
|
8
10
|
lmnr/cli/parser/nodes/input.py,sha256=Xwktcih7Mezqv4cEejfPkpG8uJxDsbqRytBvKmlJDYE,578
|
9
11
|
lmnr/cli/parser/nodes/llm.py,sha256=iQWYFnQi5PcQD9WJpTSHbSzClM6s0wBOoEqyN5c4yQo,1674
|
10
12
|
lmnr/cli/parser/nodes/output.py,sha256=1XBppSscxM01kfZhE9oOh2GgdCVzyPVe2RAxLI5HmUc,665
|
13
|
+
lmnr/cli/parser/nodes/router.py,sha256=dmCx4ho8_GdFJXQa8UevMf_uEP7AKBv_MJ2zpLC6Vck,894
|
11
14
|
lmnr/cli/parser/nodes/semantic_search.py,sha256=o_XCR7BShAq8VGeKjPTwL6MxLdB07XHSd5CE71sFFiY,2105
|
12
|
-
lmnr/cli/parser/nodes/types.py,sha256=
|
13
|
-
lmnr/cli/parser/parser.py,sha256=
|
15
|
+
lmnr/cli/parser/nodes/types.py,sha256=NRhlgI3WGd86AToo-tU974DEZzbLaH4iDdP-hEEQiIo,5343
|
16
|
+
lmnr/cli/parser/parser.py,sha256=kAZEeg358lyj_Q1IIhQB_bA7LW3Aw6RduShIfUSmLqQ,2173
|
14
17
|
lmnr/cli/parser/utils.py,sha256=wVaqHVOR9VXl8Og9nkVyCVgHIcgbtYGkDOEGPtmjZ8g,715
|
15
18
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
16
19
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/__init__.py,sha256=pLVZqvDnNf9foGR-HXnX2F7WC2TWmyCTNcUctG8SXAI,27
|
17
20
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/action.py,sha256=mZMQwwPV5LtSfwdwQ7HefI3ttvwuokp4mhVI_Xn1Zck,274
|
18
|
-
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/engine.py,sha256=
|
21
|
+
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/engine.py,sha256=kCY6J7oQpm3f9YCYY2ZBzM_9bUv_XYTCRD_uFa6PLWQ,9640
|
19
22
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/state.py,sha256=wTx7jAv7b63-8k34cYfQp_DJxhCCOYT_qRHkmnZfnc0,1686
|
20
23
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/engine/task.py,sha256=ware5VIrZvluHH3mpH6h7G0YDk5L0buSQ7s09za4Fro,1200
|
21
24
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/__init__.py,sha256=bsfbNUBYtKv37qzc_GLhSAzBam2lnowP_dlr8pubhcg,80
|
22
|
-
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/nodes/functions.py,sha256=
|
25
|
+
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/nodes/functions.py,sha256=Bwu8p7m16NAyt9wC0DTQL0MrHbM44WylLs5wTLwSxBM,7845
|
23
26
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/pipelines/{{cookiecutter.pipeline_dir_name}}/{{cookiecutter.pipeline_dir_name}}.py,sha256=WG-ZMofPpGXCx5jdWVry3_XBzcKjqn8ZycFSiWEOBPg,2858
|
24
27
|
lmnr/cli/{{cookiecutter.lmnr_pipelines_dir_name}}/types.py,sha256=iWuflMV7TiaBPs6-B-BlrovvWpZgHGGHK0v8rSqER7A,997
|
25
28
|
lmnr/sdk/endpoint.py,sha256=0HjcxMUcJz-klFZO2f5xtTaoLjcaEb8vrJ_YldTWUc8,7467
|
26
29
|
lmnr/sdk/remote_debugger.py,sha256=vCpMz7y3uboOi81qEwr8z3fhQ2H1y2YtJAxXVtb6uCA,5141
|
27
|
-
lmnr/types.py,sha256=
|
28
|
-
lmnr-0.2.
|
29
|
-
lmnr-0.2.
|
30
|
-
lmnr-0.2.
|
31
|
-
lmnr-0.2.
|
32
|
-
lmnr-0.2.
|
30
|
+
lmnr/types.py,sha256=OR9xRAQ5uTTwpJTDL_e3jZqxYJWvyX96CCoxr3oo94g,2112
|
31
|
+
lmnr-0.2.7.dist-info/LICENSE,sha256=67b_wJHVV1CBaWkrKFWU1wyqTPSdzH77Ls-59631COg,10411
|
32
|
+
lmnr-0.2.7.dist-info/METADATA,sha256=Ya1KVPAiyGxAZybuXSum8wmy4l-SnyYvaTTrvQ7uZRU,5427
|
33
|
+
lmnr-0.2.7.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
|
34
|
+
lmnr-0.2.7.dist-info/entry_points.txt,sha256=Qg7ZRax4k-rcQsZ26XRYQ8YFSBiyY2PNxYfq4a6PYXI,41
|
35
|
+
lmnr-0.2.7.dist-info/RECORD,,
|
File without changes
|
File without changes
|
File without changes
|