graphai-lib 0.0.1__tar.gz → 0.0.3__tar.gz

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
@@ -0,0 +1,32 @@
+ Metadata-Version: 2.4
+ Name: graphai-lib
+ Version: 0.0.3
+ Summary: Not an AI framework
+ Requires-Python: <3.14,>=3.10
+ Description-Content-Type: text/markdown
+ Requires-Dist: semantic-router>=0.1.5
+ Requires-Dist: networkx>=3.4.2
+ Requires-Dist: matplotlib>=3.10.0
+ Provides-Extra: dev
+ Requires-Dist: ipykernel>=6.25.0; extra == "dev"
+ Requires-Dist: ruff>=0.1.5; extra == "dev"
+ Requires-Dist: pytest>=8.2.0; extra == "dev"
+ Requires-Dist: pytest-mock>=3.12.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
+ Requires-Dist: pytest-xdist>=3.5.0; extra == "dev"
+ Requires-Dist: pytest-asyncio>=0.24.0; extra == "dev"
+ Requires-Dist: mypy>=1.7.1; extra == "dev"
+ Requires-Dist: black[jupyter]<24.5.0,>=23.12.1; extra == "dev"
+
+ # Philosophy
+
+ 1. Async-first
+ 2. Minimize abstractions
+ 3. One way to do one thing
+ 4. Graph-based AI
+
+ ## Installation
+
+ ```
+ pip install -qU graphai-lib
+ ```
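
The new metadata also declares a `dev` extra, so the development toolchain can be pulled in with pip's extra syntax (quoting shown for POSIX shells):

```
pip install -qU "graphai-lib[dev]"
```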
@@ -3,4 +3,10 @@
  1. Async-first
  2. Minimize abstractions
  3. One way to do one thing
- 4. Graph-based AI
+ 4. Graph-based AI
+
+ ## Installation
+
+ ```
+ pip install -qU graphai-lib
+ ```
@@ -0,0 +1,215 @@
+ import asyncio
+ from pydantic import Field
+ from typing import Optional
+ from collections.abc import AsyncIterator
+ from semantic_router.utils.logger import logger
+
+
+ log_stream = True
+
+ class Callback:
+     identifier: str = Field(
+         default="graphai",
+         description=(
+             "The identifier for special tokens. This allows us to easily "
+             "identify special tokens in the stream so we can handle them "
+             "correctly in any downstream process."
+         )
+     )
+     special_token_format: str = Field(
+         default="<{identifier}:{token}:{params}>",
+         description=(
+             "The format for special tokens. This is used to format special "
+             "tokens so they can be easily identified in the stream. "
+             "The format is a string with three possible components:\n"
+             "- {identifier}: An identifier shared by all special tokens, "
+             "by default this is 'graphai'.\n"
+             "- {token}: The special token type to be streamed. This may "
+             "be a tool name, identifier for start/end nodes, etc.\n"
+             "- {params}: Any additional parameters to be streamed. The parameters "
+             "are formatted as a comma-separated list of key-value pairs."
+         ),
+         examples=[
+             "<{identifier}:{token}:{params}>",
+             "<[{identifier} | {token} | {params}]>",
+             "<{token}:{params}>"
+         ]
+     )
+     token_format: str = Field(
+         default="{token}",
+         description=(
+             "The format for streamed tokens. This is used to format the "
+             "tokens typically returned from LLMs. By default, no special "
+             "formatting is applied."
+         )
+     )
+     _first_token: bool = Field(
+         default=True,
+         description="Whether this is the first token in the stream.",
+         exclude=True
+     )
+     _current_node_name: Optional[str] = Field(
+         default=None,
+         description="The name of the current node.",
+         exclude=True
+     )
+     _active: bool = Field(
+         default=True,
+         description="Whether the callback is active.",
+         exclude=True
+     )
+     _done: bool = Field(
+         default=False,
+         description="Whether the stream is done and should be closed.",
+         exclude=True
+     )
+     queue: asyncio.Queue
+
+     def __init__(
+         self,
+         identifier: str = "graphai",
+         special_token_format: str = "<{identifier}:{token}:{params}>",
+         token_format: str = "{token}",
+     ):
+         self.identifier = identifier
+         self.special_token_format = special_token_format
+         self.token_format = token_format
+         self.queue = asyncio.Queue()
+         self._done = False
+         self._first_token = True
+         self._current_node_name = None
+         self._active = True
+
+     @property
+     def first_token(self) -> bool:
+         return self._first_token
+
+     @first_token.setter
+     def first_token(self, value: bool):
+         self._first_token = value
+
+     @property
+     def current_node_name(self) -> Optional[str]:
+         return self._current_node_name
+
+     @current_node_name.setter
+     def current_node_name(self, value: Optional[str]):
+         self._current_node_name = value
+
+     @property
+     def active(self) -> bool:
+         return self._active
+
+     @active.setter
+     def active(self, value: bool):
+         self._active = value
+
+     def __call__(self, token: str, node_name: Optional[str] = None):
+         if self._done:
+             raise RuntimeError("Cannot add tokens to a closed stream")
+         self._check_node_name(node_name=node_name)
+         # otherwise we just assume node is correct and send token
+         self.queue.put_nowait(token)
+
+     async def acall(self, token: str, node_name: Optional[str] = None):
+         # TODO JB: do we need to have `node_name` param?
+         if self._done:
+             raise RuntimeError("Cannot add tokens to a closed stream")
+         self._check_node_name(node_name=node_name)
+         # otherwise we just assume node is correct and send token
+         self.queue.put_nowait(token)
+
+     async def aiter(self) -> AsyncIterator[str]:
+         """Used by receiver to get the tokens from the stream queue. Creates
+         a generator that yields tokens from the queue until the END token is
+         received.
+         """
+         end_token = await self._build_special_token(
+             name="END",
+             params=None
+         )
+         while True:  # Keep going until we see the END token
+             try:
+                 if self._done and self.queue.empty():
+                     break
+                 token = await self.queue.get()
+                 yield token
+                 self.queue.task_done()
+                 if token == end_token:
+                     break
+             except asyncio.CancelledError:
+                 break
+         self._done = True  # Mark as done after processing all tokens
+
+     async def start_node(self, node_name: str, active: bool = True):
+         """Starts a new node and emits the start token.
+         """
+         if self._done:
+             raise RuntimeError("Cannot start node on a closed stream")
+         self.current_node_name = node_name
+         if self.first_token:
+             self.first_token = False
+         self.active = active
+         if self.active:
+             token = await self._build_special_token(
+                 name=f"{self.current_node_name}:start",
+                 params=None
+             )
+             self.queue.put_nowait(token)
+             # TODO JB: should we use two tokens here?
+             node_token = await self._build_special_token(
+                 name=self.current_node_name,
+                 params=None
+             )
+             self.queue.put_nowait(node_token)
+
+     async def end_node(self, node_name: str):
+         """Emits the end token for the current node.
+         """
+         if self._done:
+             raise RuntimeError("Cannot end node on a closed stream")
+         #self.current_node_name = node_name
+         if self.active:
+             node_token = await self._build_special_token(
+                 name=f"{self.current_node_name}:end",
+                 params=None
+             )
+             self.queue.put_nowait(node_token)
+
+     async def close(self):
+         """Close the stream and prevent further tokens from being added.
+         This will send an END token and set the done flag to True.
+         """
+         if self._done:
+             return
+         end_token = await self._build_special_token(
+             name="END",
+             params=None
+         )
+         self._done = True  # Set done before putting the end token
+         self.queue.put_nowait(end_token)
+         # Don't wait for queue.join() as it can cause deadlock
+         # The stream will close when aiter processes the END token
+
+     def _check_node_name(self, node_name: Optional[str] = None):
+         if node_name:
+             # we confirm this is the current node
+             if self.current_node_name != node_name:
+                 raise ValueError(
+                     f"Node name mismatch: {self.current_node_name} != {node_name}"
+                 )
+
+     async def _build_special_token(self, name: str, params: dict[str, any] | None = None):
+         if params:
+             params_str = ",".join([f"{k}={v}" for k, v in params.items()])
+         else:
+             params_str = ""
+         if self.identifier:
+             identifier = self.identifier
+         else:
+             identifier = ""
+         return self.special_token_format.format(
+             identifier=identifier,
+             token=name,
+             params=params_str
+         )
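
Taken together, the new callback.py gives each run a queue-backed token stream with explicit start/end markers per node. Below is a minimal sketch of how a producer and a consumer might share one Callback, assuming only the API shown above; the "chat" node name and the producer/consumer split are illustrative, not part of the package:

```
import asyncio

from graphai.callback import Callback


async def main():
    cb = Callback()  # defaults: identifier="graphai", format "<{identifier}:{token}:{params}>"

    async def produce():
        await cb.start_node(node_name="chat")      # enqueues <graphai:chat:start:> and <graphai:chat:>
        await cb.acall("Hello", node_name="chat")  # plain LLM-style tokens pass through unformatted
        await cb.acall(" world", node_name="chat")
        await cb.end_node(node_name="chat")        # enqueues <graphai:chat:end:>
        await cb.close()                           # enqueues <graphai:END:> and marks the stream done

    async def consume():
        async for token in cb.aiter():             # yields until the END special token arrives
            print(token, end="", flush=True)

    await asyncio.gather(produce(), consume())


asyncio.run(main())
```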
@@ -1,21 +1,41 @@
- from typing import List, Dict, Any
+ from typing import List, Dict, Any, Optional
  from graphai.nodes.base import _Node
  from graphai.callback import Callback
  from semantic_router.utils.logger import logger
 
 
  class Graph:
-     def __init__(self, max_steps: int = 10):
-         self.nodes = []
-         self.edges = []
-         self.start_node = None
-         self.end_nodes = []
+     def __init__(self, max_steps: int = 10, initial_state: Optional[Dict[str, Any]] = None):
+         self.nodes: Dict[str, _Node] = {}
+         self.edges: List[Any] = []
+         self.start_node: Optional[_Node] = None
+         self.end_nodes: List[_Node] = []
          self.Callback = Callback
          self.callback = None
          self.max_steps = max_steps
+         self.state = initial_state or {}
+
+     # Allow getting and setting the graph's internal state
+     def get_state(self) -> Dict[str, Any]:
+         """Get the current graph state."""
+         return self.state
+
+     def set_state(self, state: Dict[str, Any]):
+         """Set the graph state."""
+         self.state = state
+
+     def update_state(self, values: Dict[str, Any]):
+         """Update the graph state with new values."""
+         self.state.update(values)
+
+     def reset_state(self):
+         """Reset the graph state to an empty dict."""
+         self.state = {}
 
      def add_node(self, node):
-         self.nodes.append(node)
+         if node.name in self.nodes:
+             raise Exception(f"Node with name '{node.name}' already exists.")
+         self.nodes[node.name] = node
          if node.is_start:
              if self.start_node is not None:
                  raise Exception(
@@ -27,10 +47,37 @@ class Graph:
          if node.is_end:
              self.end_nodes.append(node)
 
-     def add_edge(self, source: _Node, destination: _Node):
-         # TODO add logic to check that source and destination are nodes
-         # and they exist in the graph object already
-         edge = Edge(source, destination)
+     def add_edge(self, source: _Node | str, destination: _Node | str):
+         """Adds an edge between two nodes that already exist in the graph.
+
+         Args:
+             source: The source node or its name.
+             destination: The destination node or its name.
+         """
+         source_node, destination_node = None, None
+         # get source node from graph
+         if isinstance(source, str):
+             source_node = self.nodes.get(source)
+         else:
+             # Check if it's a node-like object by looking for required attributes
+             if hasattr(source, 'name'):
+                 source_node = self.nodes.get(source.name)
+         if source_node is None:
+             raise ValueError(
+                 f"Node with name '{source.name if hasattr(source, 'name') else source}' not found."
+             )
+         # get destination node from graph
+         if isinstance(destination, str):
+             destination_node = self.nodes.get(destination)
+         else:
+             # Check if it's a node-like object by looking for required attributes
+             if hasattr(destination, 'name'):
+                 destination_node = self.nodes.get(destination.name)
+         if destination_node is None:
+             raise ValueError(
+                 f"Node with name '{destination.name if hasattr(destination, 'name') else destination}' not found."
+             )
+         edge = Edge(source_node, destination_node)
          self.edges.append(edge)
 
      def add_router(self, sources: list[_Node], router: _Node, destinations: List[_Node]):
@@ -71,17 +118,20 @@
          self.callback = self.get_callback()
          current_node = self.start_node
          state = input
+         # Don't reset the graph state if it was initialized with initial_state
          steps = 0
          while True:
              # we invoke the node here
              if current_node.stream:
                  # add callback tokens and param here if we are streaming
                  await self.callback.start_node(node_name=current_node.name)
-                 output = await current_node.invoke(input=state, callback=self.callback)
+                 # Include graph's internal state in the node execution context
+                 output = await current_node.invoke(input=state, callback=self.callback, state=self.state)
                  self._validate_output(output=output, node_name=current_node.name)
                  await self.callback.end_node(node_name=current_node.name)
              else:
-                 output = await current_node.invoke(input=state)
+                 # Include graph's internal state in the node execution context
+                 output = await current_node.invoke(input=state, state=self.state)
                  self._validate_output(output=output, node_name=current_node.name)
              # add output to state
              state = {**state, **output}
@@ -113,10 +163,21 @@
          return self.callback
 
      def _get_node_by_name(self, node_name: str) -> _Node:
-         for node in self.nodes:
-             if node.name == node_name:
-                 return node
-         raise Exception(f"Node with name {node_name} not found.")
+         """Get a node by its name.
+
+         Args:
+             node_name: The name of the node to find.
+
+         Returns:
+             The node with the given name.
+
+         Raises:
+             Exception: If no node with the given name is found.
+         """
+         node = self.nodes.get(node_name)
+         if node is None:
+             raise Exception(f"Node with name {node_name} not found.")
+         return node
 
      def _get_next_node(self, current_node):
          for edge in self.edges:
@@ -139,7 +200,7 @@
 
          G = nx.DiGraph()
 
-         for node in self.nodes:
+         for node in self.nodes.values():
              G.add_node(node.name)
 
          for edge in self.edges:
@@ -173,10 +234,11 @@
                  pos[node] = (pos[node][0] - x_center, pos[node][1])
 
              # Scale the layout
-             max_x = max(abs(p[0]) for p in pos.values())
-             max_y = max(abs(p[1]) for p in pos.values())
-             scale = min(0.8 / max_x, 0.8 / max_y)
-             pos = {node: (x * scale, y * scale) for node, (x, y) in pos.items()}
+             max_x = max(abs(p[0]) for p in pos.values()) if pos else 1
+             max_y = max(abs(p[1]) for p in pos.values()) if pos else 1
+             if max_x > 0 and max_y > 0:
+                 scale = min(0.8 / max_x, 0.8 / max_y)
+                 pos = {node: (x * scale, y * scale) for node, (x, y) in pos.items()}
 
          else:
              print("Warning: The graph contains cycles. Visualization will use a spring layout.")
@@ -0,0 +1,32 @@
+ Metadata-Version: 2.4
+ Name: graphai-lib
+ Version: 0.0.3
+ Summary: Not an AI framework
+ Requires-Python: <3.14,>=3.10
+ Description-Content-Type: text/markdown
+ Requires-Dist: semantic-router>=0.1.5
+ Requires-Dist: networkx>=3.4.2
+ Requires-Dist: matplotlib>=3.10.0
+ Provides-Extra: dev
+ Requires-Dist: ipykernel>=6.25.0; extra == "dev"
+ Requires-Dist: ruff>=0.1.5; extra == "dev"
+ Requires-Dist: pytest>=8.2.0; extra == "dev"
+ Requires-Dist: pytest-mock>=3.12.0; extra == "dev"
+ Requires-Dist: pytest-cov>=4.1.0; extra == "dev"
+ Requires-Dist: pytest-xdist>=3.5.0; extra == "dev"
+ Requires-Dist: pytest-asyncio>=0.24.0; extra == "dev"
+ Requires-Dist: mypy>=1.7.1; extra == "dev"
+ Requires-Dist: black[jupyter]<24.5.0,>=23.12.1; extra == "dev"
+
+ # Philosophy
+
+ 1. Async-first
+ 2. Minimize abstractions
+ 3. One way to do one thing
+ 4. Graph-based AI
+
+ ## Installation
+
+ ```
+ pip install -qU graphai-lib
+ ```
@@ -0,0 +1,11 @@
+ README.md
+ pyproject.toml
+ graphai/__init__.py
+ graphai/callback.py
+ graphai/graph.py
+ graphai/utils.py
+ graphai_lib.egg-info/PKG-INFO
+ graphai_lib.egg-info/SOURCES.txt
+ graphai_lib.egg-info/dependency_links.txt
+ graphai_lib.egg-info/requires.txt
+ graphai_lib.egg-info/top_level.txt
@@ -0,0 +1,14 @@
+ semantic-router>=0.1.5
+ networkx>=3.4.2
+ matplotlib>=3.10.0
+
+ [dev]
+ ipykernel>=6.25.0
+ ruff>=0.1.5
+ pytest>=8.2.0
+ pytest-mock>=3.12.0
+ pytest-cov>=4.1.0
+ pytest-xdist>=3.5.0
+ pytest-asyncio>=0.24.0
+ mypy>=1.7.1
+ black[jupyter]<24.5.0,>=23.12.1
@@ -0,0 +1 @@
+ graphai
@@ -0,0 +1,31 @@
+ [project]
+ name = "graphai-lib"
+ version = "0.0.3"
+ description = "Not an AI framework"
+ readme = "README.md"
+ requires-python = ">=3.10,<3.14"
+ dependencies = [
+     "semantic-router>=0.1.5",
+     "networkx>=3.4.2",
+     "matplotlib>=3.10.0",
+ ]
+
+ [project.optional-dependencies]
+ dev = [
+     "ipykernel>=6.25.0",
+     "ruff>=0.1.5",
+     "pytest>=8.2.0",
+     "pytest-mock>=3.12.0",
+     "pytest-cov>=4.1.0",
+     "pytest-xdist>=3.5.0",
+     "pytest-asyncio>=0.24.0",
+     "mypy>=1.7.1",
+     "black[jupyter]>=23.12.1,<24.5.0",
+ ]
+
+ [build-system]
+ requires = ["setuptools>=61.0"]
+ build-backend = "setuptools.build_meta"
+
+ [tool.setuptools.packages.find]
+ include = ["graphai"]
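
Packaging moves from Poetry (see the deleted [tool.poetry] file at the end of this diff) to a PEP 621 pyproject.toml with a setuptools backend. For local development against this layout, an editable install with the dev extra is the usual pattern:

```
pip install -e ".[dev]"
```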
@@ -0,0 +1,4 @@
+ [egg_info]
+ tag_build =
+ tag_date = 0
+
@@ -1,25 +0,0 @@
- Metadata-Version: 2.1
- Name: graphai-lib
- Version: 0.0.1
- Summary:
- License: MIT
- Author: Aurelio AI
- Author-email: hello@aurelio.ai
- Requires-Python: >=3.10,<3.14
- Classifier: License :: OSI Approved :: MIT License
- Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.10
- Classifier: Programming Language :: Python :: 3.11
- Classifier: Programming Language :: Python :: 3.12
- Classifier: Programming Language :: Python :: 3.13
- Requires-Dist: matplotlib (>=3.10.0,<4.0.0)
- Requires-Dist: networkx (>=3.4.2,<4.0.0)
- Requires-Dist: semantic-router (>=0.1.0.dev4)
- Description-Content-Type: text/markdown
-
- # Philosophy
-
- 1. Async-first
- 2. Minimize abstractions
- 3. One way to do one thing
- 4. Graph-based AI
@@ -1,63 +0,0 @@
- import asyncio
- from typing import Optional
- from collections.abc import AsyncIterator
- from semantic_router.utils.logger import logger
-
-
- log_stream = True
-
- class Callback:
-     first_token = True
-     current_node_name: Optional[str] = None
-     active: bool = True
-     queue: asyncio.Queue
-
-     def __init__(self):
-         self.queue = asyncio.Queue()
-
-     def __call__(self, token: str, node_name: Optional[str] = None):
-         self._check_node_name(node_name=node_name)
-         # otherwise we just assume node is correct and send token
-         self.queue.put_nowait(token)
-
-     async def acall(self, token: str, node_name: Optional[str] = None):
-         self._check_node_name(node_name=node_name)
-         # otherwise we just assume node is correct and send token
-         self.queue.put_nowait(token)
-
-     async def aiter(self) -> AsyncIterator[str]:
-         """Used by receiver to get the tokens from the stream queue. Creates
-         a generator that yields tokens from the queue until the END token is
-         received.
-         """
-         while True:
-             token = await self.queue.get()
-             yield token
-             self.queue.task_done()
-             if token == "<graphai:END>":
-                 break
-
-     async def start_node(self, node_name: str, active: bool = True):
-         self.current_node_name = node_name
-         if self.first_token:
-             # TODO JB: not sure if we need self.first_token
-             self.first_token = False
-         self.active = active
-         if self.active:
-             self.queue.put_nowait(f"<graphai:start:{node_name}>")
-
-     async def end_node(self, node_name: str):
-         self.current_node_name = None
-         if self.active:
-             self.queue.put_nowait(f"<graphai:end:{node_name}>")
-
-     async def close(self):
-         self.queue.put_nowait("<graphai:END>")
-
-     def _check_node_name(self, node_name: Optional[str] = None):
-         if node_name:
-             # we confirm this is the current node
-             if self.current_node_name != node_name:
-                 raise ValueError(
-                     f"Node name mismatch: {self.current_node_name} != {node_name}"
-                 )
@@ -1,3 +0,0 @@
- from graphai.nodes.base import node, router
-
- __all__ = ["node", "router"]
@@ -1,148 +0,0 @@
- import inspect
- from typing import Any, Callable, Dict, Optional
-
- from graphai.callback import Callback
- from graphai.utils import FunctionSchema
-
-
- class NodeMeta(type):
-     @staticmethod
-     def positional_to_kwargs(cls_type, args) -> Dict[str, Any]:
-         init_signature = inspect.signature(cls_type.__init__)
-         init_params = {name: arg for name, arg in init_signature.parameters.items() if name != "self"}
-         return init_params
-
-     def __call__(cls, *args, **kwargs):
-         named_positional_args = NodeMeta.positional_to_kwargs(cls, args)
-         kwargs.update(named_positional_args)
-         return super().__call__(**kwargs)
-
-
- class _Node:
-     def __init__(
-         self,
-         is_router: bool = False,
-     ):
-         self.is_router = is_router
-
-     def _node(
-         self,
-         func: Callable,
-         start: bool = False,
-         end: bool = False,
-         stream: bool = False,
-     ) -> Callable:
-         """Decorator validating node structure.
-         """
-         if not callable(func):
-             raise ValueError("Node must be a callable function.")
-
-         func_signature = inspect.signature(func)
-         schema = FunctionSchema(func)
-
-         class NodeClass:
-             _func_signature = func_signature
-             is_router = None
-             _stream = stream
-
-             def __init__(self):
-                 self._expected_params = set(self._func_signature.parameters.keys())
-
-             async def execute(self, *args, **kwargs):
-                 # Prepare arguments, including callback if stream is True
-                 params_dict = await self._parse_params(*args, **kwargs)
-                 return await func(**params_dict)  # Pass only the necessary arguments
-
-             async def _parse_params(self, *args, **kwargs) -> Dict[str, Any]:
-                 # filter out unexpected keyword args
-                 expected_kwargs = {k: v for k, v in kwargs.items() if k in self._expected_params}
-                 # Convert args to kwargs based on the function signature
-                 args_names = list(self._func_signature.parameters.keys())[1:len(args)+1]  # skip 'self'
-                 expected_args_kwargs = dict(zip(args_names, args))
-                 # Combine filtered args and kwargs
-                 combined_params = {**expected_args_kwargs, **expected_kwargs}
-
-                 # Bind the current instance attributes to the function signature
-                 if "callback" in self._expected_params and not stream:
-                     raise ValueError(
-                         f"Node {func.__name__}: requires stream=True when callback is defined."
-                     )
-                 bound_params = self._func_signature.bind_partial(**combined_params)
-                 # get the default parameters (if any)
-                 bound_params.apply_defaults()
-                 params_dict = bound_params.arguments.copy()
-                 # Filter arguments to match the next node's parameters
-                 filtered_params = {
-                     k: v for k, v in params_dict.items() if k in self._expected_params
-                 }
-                 # confirm all required parameters are present
-                 missing_params = [
-                     p for p in self._expected_params if p not in filtered_params
-                 ]
-                 # if anything is missing we raise an error
-                 if missing_params:
-                     raise ValueError(
-                         f"Missing required parameters for the {func.__name__} node: {', '.join(missing_params)}"
-                     )
-                 return filtered_params
-
-
-             @classmethod
-             def get_signature(cls):
-                 """Returns the signature of the decorated function as LLM readable
-                 string.
-                 """
-                 signature_components = []
-                 if NodeClass._func_signature:
-                     for param in NodeClass._func_signature.parameters.values():
-                         if param.default is param.empty:
-                             signature_components.append(f"{param.name}: {param.annotation}")
-                         else:
-                             signature_components.append(f"{param.name}: {param.annotation} = {param.default}")
-                 else:
-                     return "No signature"
-                 return "\n".join(signature_components)
-
-             @classmethod
-             async def invoke(cls, input: Dict[str, Any], callback: Optional[Callback] = None):
-                 if callback:
-                     if stream:
-                         input["callback"] = callback
-                     else:
-                         raise ValueError(
-                             f"Error in node {func.__name__}. When callback provided, stream must be True."
-                         )
-                 instance = cls()
-                 out = await instance.execute(**input)
-                 return out
-
-         NodeClass.__name__ = func.__name__
-         NodeClass.name = func.__name__
-         NodeClass.__doc__ = func.__doc__
-         NodeClass.is_start = start
-         NodeClass.is_end = end
-         NodeClass.is_router = self.is_router
-         NodeClass.stream = stream
-         NodeClass.schema = schema
-         return NodeClass
-
-     def __call__(
-         self,
-         func: Optional[Callable] = None,
-         start: bool = False,
-         end: bool = False,
-         stream: bool = False,
-     ):
-         # We must wrap the call to the decorator in a function for it to work
-         # correctly with or without parenthesis
-         def wrap(func: Callable, start=start, end=end, stream=stream) -> Callable:
-             return self._node(func=func, start=start, end=end, stream=stream)
-         if func:
-             # Decorator is called without parenthesis
-             return wrap(func=func, start=start, end=end, stream=stream)
-         # Decorator is called with parenthesis
-         return wrap
-
-
- node = _Node()
- router = _Node(is_router=True)
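
For context on the 0.0.1 API deleted above: `node` and `router` were decorator instances, applied with or without parentheses, and decorated functions became node classes carrying `is_start`/`is_end`/`stream` flags. A rough usage sketch against the removed code; the function names and bodies are illustrative:

```
from graphai import node


@node(start=True)
async def draft(query: str):
    # node functions are async and return a dict that is merged into graph state
    return {"draft": f"answer to: {query}"}


@node(end=True, stream=True)
async def respond(draft: str, callback):
    # stream=True is required whenever the function declares a callback param
    await callback.acall(draft)
    return {"answer": draft}
```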
@@ -1,29 +0,0 @@
- [tool.poetry]
- name = "graphai-lib"
- version = "0.0.1"
- description = ""
- authors = ["Aurelio AI <hello@aurelio.ai>"]
- readme = "README.md"
- packages = [{include = "graphai"}]
- license = "MIT"
-
- [tool.poetry.dependencies]
- python = ">=3.10,<3.14"
- semantic-router = ">=0.1.0.dev4"
- networkx = "^3.4.2"
- matplotlib = "^3.10.0"
-
- [tool.poetry.group.dev.dependencies]
- ipykernel = "^6.25.0"
- ruff = "^0.1.5"
- pytest = "^8.2"
- pytest-mock = "^3.12.0"
- pytest-cov = "^4.1.0"
- pytest-xdist = "^3.5.0"
- pytest-asyncio = "^0.24.0"
- mypy = "^1.7.1"
- black = {extras = ["jupyter"], version = ">=23.12.1,<24.5.0"}
-
- [build-system]
- requires = ["poetry-core"]
- build-backend = "poetry.core.masonry.api"