livekit-plugins-langchain 1.2.6__py3-none-any.whl → 1.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of livekit-plugins-langchain might be problematic.

livekit/plugins/langchain/langgraph.py

@@ -38,10 +38,12 @@ class LLMAdapter(llm.LLM):
         graph: PregelProtocol,
         *,
         config: RunnableConfig | None = None,
+        subgraphs: bool = False,
     ) -> None:
         super().__init__()
         self._graph = graph
         self._config = config
+        self._subgraphs = subgraphs
 
     def chat(
         self,
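For orientation, the new flag is set once on the adapter and then forwarded to every stream it creates (next hunk). A minimal usage sketch, assuming `LLMAdapter` is exposed from `livekit.plugins.langchain` as in the plugin's public API, with a placeholder one-node graph that is not part of this package:

```python
from typing import Annotated, TypedDict

from langchain_core.messages import AIMessage
from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages

from livekit.plugins import langchain


class State(TypedDict):
    messages: Annotated[list, add_messages]


def respond(state: State) -> dict:
    # Placeholder node; a real graph would call a chat model here.
    return {"messages": [AIMessage(content="hello from the graph")]}


builder = StateGraph(State)
builder.add_node("respond", respond)
builder.add_edge(START, "respond")
builder.add_edge("respond", END)
graph = builder.compile()  # satisfies PregelProtocol

# New in 1.2.8: opt in to token streaming from nested subgraphs as well.
adapter = langchain.LLMAdapter(graph, subgraphs=True)
```

Defaulting `subgraphs` to False keeps the streaming behavior of existing callers unchanged.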
@@ -61,6 +63,7 @@ class LLMAdapter(llm.LLM):
             graph=self._graph,
             conn_options=conn_options,
             config=self._config,
+            subgraphs=self._subgraphs,
         )
 
 
@@ -74,6 +77,7 @@ class LangGraphStream(llm.LLMStream):
         conn_options: APIConnectOptions,
         graph: PregelProtocol,
         config: RunnableConfig | None = None,
+        subgraphs: bool = False,
     ):
         super().__init__(
             llm,
@@ -83,16 +87,33 @@ class LangGraphStream(llm.LLMStream):
         )
         self._graph = graph
         self._config = config
+        self._subgraphs = subgraphs
 
     async def _run(self) -> None:
         state = self._chat_ctx_to_state()
 
-        async for message_chunk, _ in self._graph.astream(
-            state,
-            self._config,
-            stream_mode="messages",
-        ):
-            chat_chunk = _to_chat_chunk(message_chunk)
+        # Some LangGraph versions don't accept the `subgraphs` kwarg yet.
+        # Try with it first; fall back gracefully if it's unsupported.
+        try:
+            aiter = self._graph.astream(
+                state,
+                self._config,
+                stream_mode="messages",
+                subgraphs=self._subgraphs,
+            )
+        except TypeError:
+            aiter = self._graph.astream(
+                state,
+                self._config,
+                stream_mode="messages",
+            )
+
+        async for item in aiter:
+            token_like = _extract_message_chunk(item)
+            if token_like is None:
+                continue
+
+            chat_chunk = _to_chat_chunk(token_like)
             if chat_chunk:
                 self._event_ch.send_nowait(chat_chunk)
 
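The try/except above relies on the fact that an unexpected keyword argument normally raises TypeError at the call itself, before the returned iterator is consumed, so older LangGraph releases simply fall back to the original call. A purely illustrative alternative (not used by the plugin) is to probe the signature once up front:

```python
import inspect
from typing import Any


def astream_accepts_subgraphs(graph: Any) -> bool:
    """Best-effort check: does graph.astream declare a `subgraphs` parameter?

    Hypothetical helper shown for comparison only; a **kwargs-style signature
    is treated as accepting the keyword.
    """
    try:
        params = inspect.signature(graph.astream).parameters
    except (TypeError, ValueError):  # some callables are not introspectable
        return False
    if "subgraphs" in params:
        return True
    return any(p.kind is inspect.Parameter.VAR_KEYWORD for p in params.values())
```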
 
@@ -112,9 +133,48 @@ class LangGraphStream(llm.LLMStream):
             elif item.role in ["system", "developer"]:
                 messages.append(SystemMessage(content=content, id=item.id))
 
-        return {
-            "messages": messages,
-        }
+        return {"messages": messages}
+
+
+def _extract_message_chunk(item: Any) -> BaseMessageChunk | str | None:
+    """
+    Normalize outputs from graph.astream(..., stream_mode='messages', [subgraphs]).
+
+    Expected shapes:
+      - (token, meta)
+      - (namespace, (token, meta))        # with subgraphs=True
+      - (mode, (token, meta))             # future-friendly
+      - (namespace, mode, (token, meta))  # future-friendly
+    Also tolerate direct token-like values for robustness.
+    """
+    # Already a token-like thing?
+    if isinstance(item, (BaseMessageChunk, str)):
+        return item
+
+    if not isinstance(item, tuple):
+        return None
+
+    # token is usually BaseMessageChunk, but could be a str
+    # (token, meta)
+    if len(item) == 2 and not isinstance(item[1], tuple):
+        token, _meta = item
+        return token  # type: ignore
+
+    # (namespace, (token, meta)) OR (mode, (token, meta))
+    if len(item) == 2 and isinstance(item[1], tuple):
+        inner = item[1]
+        if len(inner) == 2:
+            token, _meta = inner
+            return token  # type: ignore
+
+    # (namespace, mode, (token, meta))
+    if len(item) == 3 and isinstance(item[2], tuple):
+        inner = item[2]
+        if len(inner) == 2:
+            token, _meta = inner
+            return token  # type: ignore
+
+    return None
 
 
 def _to_chat_chunk(msg: str | Any) -> llm.ChatChunk | None:
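To see what the normalizer does with each documented shape, the tuples from its docstring can be fed to it directly. A small sketch, importing the private helper purely for illustration and using made-up metadata:

```python
from langchain_core.messages import AIMessageChunk

# Private helper added in 1.2.8; imported here only to demonstrate its behavior.
from livekit.plugins.langchain.langgraph import _extract_message_chunk

token = AIMessageChunk(content="hi")
meta = {"langgraph_node": "respond"}  # made-up metadata dict

plain = (token, meta)                      # stream_mode="messages"
namespaced = (("parent", "child"), plain)  # subgraphs=True prepends a namespace
tagged = (("parent",), "messages", plain)  # three-element "future-friendly" shape

for item in (token, plain, namespaced, tagged, {"unexpected": "shape"}):
    chunk = _extract_message_chunk(item)
    print(type(chunk).__name__ if chunk is not None else None)
# Prints AIMessageChunk four times, then None for the unrecognized shape.
```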
@@ -125,8 +185,8 @@ def _to_chat_chunk(msg: str | Any) -> llm.ChatChunk | None:
         content = msg
     elif isinstance(msg, BaseMessageChunk):
         content = msg.text()
-        if msg.id:
-            message_id = msg.id
+        if getattr(msg, "id", None):
+            message_id = msg.id  # type: ignore
 
     if not content:
         return None
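The switch to `getattr` keeps the id lookup safe for chunk-like objects that don't define an `id` attribute (plain strings already take the earlier `content = msg` branch). Again purely illustrative, reusing the plugin's private converter:

```python
from langchain_core.messages import AIMessageChunk

from livekit.plugins.langchain.langgraph import _to_chat_chunk

with_id = AIMessageChunk(content="partial text", id="msg_1")
print(_to_chat_chunk(with_id))        # ChatChunk; id taken from the message

print(_to_chat_chunk("plain token"))  # str path: content used as-is
print(_to_chat_chunk(""))             # empty content -> None
```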
livekit/plugins/langchain/version.py

@@ -12,4 +12,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "1.2.6"
+__version__ = "1.2.8"
livekit_plugins_langchain-{1.2.6 → 1.2.8}.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: livekit-plugins-langchain
-Version: 1.2.6
+Version: 1.2.8
 Summary: LangChain/LangGraph plugin for LiveKit agents
 Project-URL: Documentation, https://docs.livekit.io
 Project-URL: Website, https://livekit.io/
@@ -19,7 +19,7 @@ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
 Requires-Python: >=3.9.0
 Requires-Dist: langchain-core>=0.3.0
 Requires-Dist: langgraph>=0.3.0
-Requires-Dist: livekit-agents>=1.2.6
+Requires-Dist: livekit-agents>=1.2.8
 Description-Content-Type: text/markdown
 
 # LangChain plugin for LiveKit Agents
livekit_plugins_langchain-{1.2.6 → 1.2.8}.dist-info/RECORD

@@ -0,0 +1,7 @@
+livekit/plugins/langchain/__init__.py,sha256=orJAv6qYRWJatEdyMlf9D93GXxeK-ELYa9XIqYF76hw,1140
+livekit/plugins/langchain/langgraph.py,sha256=SXZTRBY7vOwIwwITrSoG0_ZR76UKqWh5rRr1R1NPOgI,6448
+livekit/plugins/langchain/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+livekit/plugins/langchain/version.py,sha256=A0WkWSMcyRjWChIOCaf9rjKCQpO2BmwPf6v0PURlv-k,600
+livekit_plugins_langchain-1.2.8.dist-info/METADATA,sha256=07Fr7eYO76M4CH9w4vzuSgvpwas2KvVacvHsGi5lvdI,1735
+livekit_plugins_langchain-1.2.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+livekit_plugins_langchain-1.2.8.dist-info/RECORD,,
@@ -1,7 +0,0 @@
-livekit/plugins/langchain/__init__.py,sha256=orJAv6qYRWJatEdyMlf9D93GXxeK-ELYa9XIqYF76hw,1140
-livekit/plugins/langchain/langgraph.py,sha256=KxbK6AbAkf_2oZ6eACCP9J3jZ-p1SqsTG2NBzLWDK-s,4408
-livekit/plugins/langchain/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-livekit/plugins/langchain/version.py,sha256=rmFW-1Nh90Zrz7VO8AXjb-TFa4_KVV79YegUSrx9qbw,600
-livekit_plugins_langchain-1.2.6.dist-info/METADATA,sha256=1_UOr96PUbT6Fo9BAKh95_enxHji0g4s_cf9LUAcAeg,1735
-livekit_plugins_langchain-1.2.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-livekit_plugins_langchain-1.2.6.dist-info/RECORD,,