graphai-lib 0.0.9rc1-py3-none-any.whl → 0.0.9rc3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
graphai/callback.py CHANGED
@@ -1,13 +1,53 @@
 import asyncio
+from dataclasses import dataclass
+from enum import Enum
 from pydantic import Field
 from typing import Any
 from collections.abc import AsyncIterator
+import warnings


 log_stream = True


+class StrEnum(Enum):
+    def __str__(self) -> str:
+        return str(self.value)
+
+class GraphEventType(StrEnum):
+    START = "start"
+    END = "end"
+    START_NODE = "start_node"
+    END_NODE = "end_node"
+    CALLBACK = "callback"
+
+@dataclass
+class GraphEvent:
+    """A graph event emitted for specific graph events such as start node or end node,
+    and used by the callback to emit user-defined events.
+
+    :param type: The type of event: start, end, start_node, end_node, or callback.
+    :type type: GraphEventType
+    :param identifier: The identifier of the event. This is typically set by a callback
+        handler and can be used to distinguish between different events. For example, a
+        conversation/session ID could be used.
+    :type identifier: str
+    :param token: The token associated with the event, such as LLM streamed output.
+    :type token: str | None
+    :param params: The parameters associated with the event, such as tool call parameters
+        or event metadata.
+    :type params: dict[str, Any] | None
+    """
+    type: GraphEventType
+    identifier: str
+    token: str | None = None
+    params: dict[str, Any] | None = None
+
+
 class Callback:
+    """The original callback handler class. Outputs a stream of structured text
+    tokens. It is recommended to use the newer `EventCallback` handler instead.
+    """
     identifier: str = Field(
         default="graphai",
         description=(
@@ -67,6 +107,11 @@ class Callback:
         special_token_format: str = "<{identifier}:{token}:{params}>",
         token_format: str = "{token}",
     ):
+        warnings.warn(
+            "The `Callback` class is deprecated and will be removed in " +
+            "v0.1.0. Use the `EventCallback` class instead.",
+            DeprecationWarning
+        )
         self.identifier = identifier
         self.special_token_format = special_token_format
         self.token_format = token_format
@@ -198,3 +243,102 @@ class Callback:
         return self.special_token_format.format(
             identifier=identifier, token=name, params=params_str
         )
+
+
+class EventCallback(Callback):
+    """The event callback handler class. Outputs a stream of structured
+    `GraphEvent` objects and replaces the deprecated `Callback` handler.
+    """
+    def __init__(
+        self,
+        identifier: str = "graphai",
+        special_token_format: str | None = None,
+        token_format: str | None = None,
+    ):
+        # only warn when the deprecated formatting parameters are actually provided
+        if special_token_format is not None or token_format is not None:
+            warnings.warn(
+                "The `special_token_format` and `token_format` parameters are " +
+                "deprecated and will be removed in v0.1.0.",
+                DeprecationWarning
+            )
+        if special_token_format is None:
+            special_token_format = "<{identifier}:{token}:{params}>"
+        if token_format is None:
+            token_format = "{token}"
+        super().__init__(identifier, special_token_format, token_format)
+        self.events: list[GraphEvent] = []
+
+    def __call__(self, token: str, node_name: str | None = None):
+        if self._done:
+            raise RuntimeError("Cannot add tokens to a closed stream")
+        self._check_node_name(node_name=node_name)
+        event = GraphEvent(type=GraphEventType.CALLBACK, identifier=self.identifier, token=token, params=None)
+        # otherwise we just assume node is correct and send token
+        self.queue.put_nowait(event)
+
+    async def acall(self, token: str, node_name: str | None = None):
+        # TODO JB: do we need to have `node_name` param?
+        if self._done:
+            raise RuntimeError("Cannot add tokens to a closed stream")
+        self._check_node_name(node_name=node_name)
+        event = GraphEvent(type=GraphEventType.CALLBACK, identifier=self.identifier, token=token, params=None)
+        # otherwise we just assume node is correct and send token
+        self.queue.put_nowait(event)
+
+    async def aiter(self) -> AsyncIterator[GraphEvent]:  # type: ignore[override]
+        """Used by receiver to get the tokens from the stream queue. Creates
+        a generator that yields tokens from the queue until the END token is
+        received.
+        """
+        while True:  # Keep going until we see the END token
+            try:
+                if self._done and self.queue.empty():
+                    break
+                token = await self.queue.get()
+                yield token
+                self.queue.task_done()
+                if token.type == GraphEventType.END:
+                    break
+            except asyncio.CancelledError:
+                break
+        self._done = True  # Mark as done after processing all tokens
+
+    async def start_node(self, node_name: str, active: bool = True):
+        """Starts a new node and emits the start token."""
+        if self._done:
+            raise RuntimeError("Cannot start node on a closed stream")
+        self.current_node_name = node_name
+        if self.first_token:
+            self.first_token = False
+        self.active = active
+        if self.active:
+            token = GraphEvent(type=GraphEventType.START_NODE, identifier=self.identifier, token=self.current_node_name, params=None)
+            self.queue.put_nowait(token)
+
+    async def end_node(self, node_name: str):
+        """Emits the end token for the current node."""
+        if self._done:
+            raise RuntimeError("Cannot end node on a closed stream")
+        # self.current_node_name = node_name
+        if self.active:
+            token = GraphEvent(type=GraphEventType.END_NODE, identifier=self.identifier, token=self.current_node_name, params=None)
+            self.queue.put_nowait(token)
+
+    async def close(self):
+        """Close the stream and prevent further tokens from being added.
+        This will send an END token and set the done flag to True.
+        """
+        if self._done:
+            return
+        end_token = GraphEvent(type=GraphEventType.END, identifier=self.identifier)
+        self._done = True  # Set done before putting the end token
+        self.queue.put_nowait(end_token)
+        # Don't wait for queue.join() as it can cause deadlock
+        # The stream will close when aiter processes the END token
+
+    async def _build_special_token(
+        self, name: str, params: dict[str, Any] | None = None
+    ):
+        raise NotImplementedError("This method is not implemented for the `EventCallback` class.")
+
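For context on how the new event API fits together, here is a minimal, hypothetical consumer sketch; it is not part of the package diff. It assumes the base `Callback` constructor initialises the internal asyncio queue, `_done` flag, and node-tracking attributes that the `EventCallback` methods above rely on, and that `_check_node_name` accepts `None`; the `identifier` value is illustrative. In real use the graph's execution loop drives `start_node`/`end_node`/`close` for you.

import asyncio

from graphai.callback import EventCallback, GraphEventType


async def main():
    cb = EventCallback(identifier="session-123")

    async def produce():
        # the graph would normally emit these around node execution
        await cb.start_node("llm")       # START_NODE event
        await cb.acall(token="Hello")    # CALLBACK event carrying a streamed token
        await cb.acall(token=" world")
        await cb.end_node("llm")         # END_NODE event
        await cb.close()                 # terminal END event stops aiter()

    async def consume():
        async for event in cb.aiter():
            if event.type == GraphEventType.CALLBACK:
                print(event.token, end="", flush=True)

    await asyncio.gather(produce(), consume())


if __name__ == "__main__":
    asyncio.run(main())

The consumer filters on `event.type`, so node boundaries arrive as structured events rather than having to be parsed out of the old special-token text format.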
graphai/graph.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
-from typing import Any, Protocol, Type
-from graphlib import TopologicalSorter, CycleError
+from typing import Any, Protocol
 from graphai.callback import Callback
 from graphai.utils import logger
 
@@ -63,7 +62,7 @@ class Graph:
         self.edges: list[Any] = []
         self.start_node: NodeProtocol | None = None
         self.end_nodes: list[NodeProtocol] = []
-        self.Callback: Type[Callback] = Callback
+        self.Callback: type[Callback] = Callback
         self.max_steps = max_steps
         self.state = initial_state or {}
 
@@ -288,17 +287,6 @@ class Graph:
             "(src, Iterable[dst]), mapping{'source'/'destination'}, or objects with .source/.destination"
         )
 
-        # cycle detection
-        preds: dict[str, set[str]] = {n: set() for n in nodes.keys()}
-        for s, ds in adj.items():
-            for d in ds:
-                preds[d].add(s)
-
-        try:
-            list(TopologicalSorter(preds).static_order())
-        except CycleError as e:
-            raise GraphCompileError("Cycle detected in graph") from e
-
         # reachability from start
         seen: set[str] = set()
         stack = [start_name]
@@ -388,7 +376,7 @@ class Graph:
         as the default callback when no callback is passed to the `execute` method.
 
         :param callback_class: The callback class to use as the default callback.
-        :type callback_class: Type[Callback]
+        :type callback_class: type[Callback]
         """
        self.Callback = callback_class
        return self
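The block removed above was the only compile-time cycle check; 0.0.9rc3 keeps just the reachability walk, so cyclic graphs are no longer rejected at compile time. A standalone sketch of what the removed check did (the node names and adjacency here are illustrative, not from the package):

from graphlib import TopologicalSorter, CycleError

# source -> destinations, mirroring the adjacency mapping built during compilation
adj = {"a": {"b"}, "b": {"c"}, "c": {"a"}}  # deliberately cyclic
nodes = ["a", "b", "c"]

# invert edges into predecessor sets, as the removed code did
preds: dict[str, set[str]] = {n: set() for n in nodes}
for s, ds in adj.items():
    for d in ds:
        preds[d].add(s)

try:
    list(TopologicalSorter(preds).static_order())
except CycleError:
    # 0.0.9rc1 raised GraphCompileError("Cycle detected in graph") here
    print("Cycle detected in graph")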
graphai_lib-0.0.9rc1.dist-info/METADATA → graphai_lib-0.0.9rc3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: graphai-lib
-Version: 0.0.9rc1
+Version: 0.0.9rc3
 Summary: Not an AI framework
 Requires-Python: <3.14,>=3.10
 Description-Content-Type: text/markdown
graphai_lib-0.0.9rc1.dist-info/RECORD → graphai_lib-0.0.9rc3.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
 graphai/__init__.py,sha256=UbqXq7iGIYe1GyTPcpgLSXbgWovggFsAbTMtr4JQm3M,160
-graphai/callback.py,sha256=Wl0JCmE8NcFifKmP9-a5bFa0WKVHTdrSClHVRmIEPpc,7323
-graphai/graph.py,sha256=Bm_Jwa5EMUACqXTZSoibVQfHSwS1rV-ExfxqYRIkPDY,18944
+graphai/callback.py,sha256=NrwArRBHWXvDodIdkDKmCAX3RlF1Zr7dGJsz1anNPgM,13195
+graphai/graph.py,sha256=O0hZ_29ln8oTYnk5EX5bdhO6-Cr9pm-6IVCq-ZPWbgY,18526
 graphai/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 graphai/utils.py,sha256=LIFg-fQalU9sB5DCuk6is48OdpEgNX95i9h-YddFbvM,11717
 graphai/nodes/__init__.py,sha256=IaMUryAqTZlcEqh-ZS6A4NIYG18JZwzo145dzxsYjAk,74
 graphai/nodes/base.py,sha256=SoKfOdRu5EIJ_z8xIz5zbNXcxPI2l9MKTQDeaQI-2no,7494
-graphai_lib-0.0.9rc1.dist-info/METADATA,sha256=T8nS6wOKwd7ZfzVvD_h4m38bowb9zzr7TutdIz5GWJQ,913
-graphai_lib-0.0.9rc1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-graphai_lib-0.0.9rc1.dist-info/top_level.txt,sha256=TXlqmhLViX-3xGH2g5w6cavRd-QMf229Hl88jdMOGt8,8
-graphai_lib-0.0.9rc1.dist-info/RECORD,,
+graphai_lib-0.0.9rc3.dist-info/METADATA,sha256=9iXKyNdp-EsY0_D5hVTy2eixMmkqnVsFR1G3cD-NvVs,913
+graphai_lib-0.0.9rc3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+graphai_lib-0.0.9rc3.dist-info/top_level.txt,sha256=TXlqmhLViX-3xGH2g5w6cavRd-QMf229Hl88jdMOGt8,8
+graphai_lib-0.0.9rc3.dist-info/RECORD,,