quraite 0.1.2__py3-none-any.whl → 0.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. quraite/__init__.py +4 -0
  2. quraite/adapters/__init__.py +1 -1
  3. quraite/adapters/agno_adapter.py +50 -92
  4. quraite/adapters/base.py +26 -76
  5. quraite/adapters/bedrock_agents_adapter.py +23 -76
  6. quraite/adapters/flowise_adapter.py +31 -72
  7. quraite/adapters/google_adk_adapter.py +28 -94
  8. quraite/adapters/http_adapter.py +28 -44
  9. quraite/adapters/langchain_adapter.py +51 -118
  10. quraite/adapters/langchain_server_adapter.py +37 -89
  11. quraite/adapters/langflow_adapter.py +15 -60
  12. quraite/adapters/n8n_adapter.py +19 -63
  13. quraite/adapters/openai_agents_adapter.py +35 -59
  14. quraite/adapters/pydantic_ai_adapter.py +27 -97
  15. quraite/adapters/smolagents_adapter.py +21 -82
  16. quraite/constants/framework.py +14 -0
  17. quraite/schema/__init__.py +4 -0
  18. quraite/schema/invoke.py +46 -0
  19. quraite/schema/message.py +20 -21
  20. quraite/serve/__init__.py +4 -0
  21. quraite/serve/cloudflared.py +3 -2
  22. quraite/serve/server.py +305 -0
  23. quraite/tracing/__init__.py +8 -5
  24. quraite/tracing/constants.py +0 -14
  25. quraite/tracing/setup.py +129 -0
  26. quraite/tracing/span_exporter.py +6 -6
  27. quraite/tracing/span_processor.py +6 -7
  28. quraite/tracing/tool_extractors.py +1 -1
  29. quraite/tracing/trace.py +36 -24
  30. quraite/utils/json_utils.py +2 -2
  31. {quraite-0.1.2.dist-info → quraite-0.1.4.dist-info}/METADATA +54 -62
  32. quraite-0.1.4.dist-info/RECORD +37 -0
  33. quraite/schema/response.py +0 -16
  34. quraite/serve/local_agent.py +0 -360
  35. quraite/traces/traces_adk_openinference.json +0 -379
  36. quraite/traces/traces_agno_multi_agent.json +0 -669
  37. quraite/traces/traces_agno_openinference.json +0 -321
  38. quraite/traces/traces_crewai_openinference.json +0 -155
  39. quraite/traces/traces_langgraph_openinference.json +0 -349
  40. quraite/traces/traces_langgraph_openinference_multi_agent.json +0 -2705
  41. quraite/traces/traces_langgraph_traceloop.json +0 -510
  42. quraite/traces/traces_openai_agents_multi_agent_1.json +0 -402
  43. quraite/traces/traces_openai_agents_openinference.json +0 -341
  44. quraite/traces/traces_pydantic_openinference.json +0 -286
  45. quraite/traces/traces_pydantic_openinference_multi_agent_1.json +0 -399
  46. quraite/traces/traces_pydantic_openinference_multi_agent_2.json +0 -398
  47. quraite/traces/traces_smol_agents_openinference.json +0 -397
  48. quraite/traces/traces_smol_agents_tool_calling_openinference.json +0 -704
  49. quraite-0.1.2.dist-info/RECORD +0 -49
  50. {quraite-0.1.2.dist-info → quraite-0.1.4.dist-info}/WHEEL +0 -0
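The recurring change across the adapter diffs below is the invocation contract: the per-adapter _prepare_input helpers and AgentInvocationResponse (from the removed quraite/schema/response.py) are gone, every adapter now takes a single InvokeInput and returns an InvokeOutput from the new quraite/schema/invoke.py, and the Framework enum moves from quraite.tracing.constants to quraite.constants.framework. A minimal migration sketch for hypothetical caller code that imported these names directly (not part of the package itself):

# 0.1.2 imports (removed in 0.1.4)
# from quraite.schema.response import AgentInvocationResponse
# from quraite.tracing.constants import Framework

# 0.1.4 equivalents, per the hunks below
from quraite.constants.framework import Framework
from quraite.schema.invoke import InvokeInput, InvokeOutput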
quraite/adapters/langchain_adapter.py
@@ -1,19 +1,24 @@
 import json
-from typing import Optional
 
 from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage
+from langchain_core.runnables import RunnableConfig
 from langgraph.graph.state import CompiledStateGraph
-from opentelemetry.trace import TracerProvider
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace import TracerProvider as SDKTracerProvider
 
 from quraite.adapters.base import BaseAdapter
+from quraite.constants.framework import Framework
 from quraite.logger import get_logger
-from quraite.schema.message import AgentMessage, AssistantMessage, MessageContentText
+from quraite.schema.invoke import InvokeInput, InvokeOutput
+from quraite.schema.message import (
+    AgentMessage,
+    AssistantMessage,
+    MessageContentText,
+    ToolCall,
+    UserMessage,
+)
 from quraite.schema.message import SystemMessage as QuraiteSystemMessage
-from quraite.schema.message import ToolCall
 from quraite.schema.message import ToolMessage as QuraiteToolMessage
-from quraite.schema.message import UserMessage
-from quraite.schema.response import AgentInvocationResponse
-from quraite.tracing.constants import Framework
 from quraite.tracing.trace import AgentSpan, AgentTrace
 
 LangchainMessage = HumanMessage | SystemMessage | AIMessage | ToolMessage
@@ -22,84 +27,33 @@ logger = get_logger(__name__)
 
 
 class LangchainAdapter(BaseAdapter):
-    """
-    LangChain adapter wrapper that converts any LangChain agent
-    to a standardized callable interface (invoke) and converts the output to List[AgentMessage].
-
-    This class wraps any LangChain CompiledGraph and provides:
-    - Synchronous invocation via invoke()
-    - Asynchronous invocation via ainvoke()
-    - Automatic conversion to List[AgentMessage] format
-    """
+    """LangChain adapter for CompiledStateGraph agents."""
 
     def __init__(
         self,
         agent_graph: CompiledStateGraph,
+        *,
+        tracer_provider: SDKTracerProvider | None = None,
         agent_name: str = "LangChain Agent",
-        tracer_provider: Optional[TracerProvider] = None,
    ):
        """
-        Initialize with a pre-configured LangChain agent
+        Initialize LangChain adapter.
 
        Args:
-            agent_graph: Any CompiledGraph from LangChain (must have invoke/ainvoke methods)
-            agent_name: Name of the agent for trajectory metadata
+            agent_graph: LangChain CompiledStateGraph
+            tracer_provider: Optional TracerProvider from setup_tracing()
+            agent_name: Agent name for metadata
        """
-        logger.debug("Initializing LangchainAdapter with agent_name=%s", agent_name)
        self.agent_graph = agent_graph
        self.agent_name = agent_name
-        self._init_tracing(tracer_provider, required=False)
-        logger.info(
-            "LangchainAdapter initialized successfully (tracing_enabled=%s)",
-            bool(tracer_provider),
-        )
-
-    def _prepare_input(
-        self, input: list[AgentMessage]
-    ) -> dict[str, list[HumanMessage]]:
-        """
-        Prepare input for LangChain agent from List[Message].
-
-        Args:
-            input: List[AgentMessage] containing user_message
-
-        Returns:
-            Dictionary with 'messages' key containing the prepared UserMessage
-        """
-        logger.debug("Preparing input from %d messages", len(input))
-
-        if not input or input[-1].role != "user":
-            logger.error(
-                "Invalid input: no user message found (input_length=%d)", len(input)
-            )
-            raise ValueError("No user message found in the input")
-
-        last_user_message = input[-1]
-
-        if not last_user_message.content:
-            logger.error("User message has no content")
-            raise ValueError("User message has no content")
-
-        text_content = next(
-            (
-                content_item.text
-                for content_item in last_user_message.content
-                if content_item.type == "text" and content_item.text
-            ),
-            None,
-        )
-
-        if not text_content:
-            logger.error("No text content found in user message")
-            raise ValueError("No text content found in user message")
-
-        logger.debug("Prepared input with text_content length=%d", len(text_content))
-        return {"messages": [HumanMessage(content=text_content)]}
+        self._init_tracer(tracer_provider)
+        logger.info("LangchainAdapter initialized (tracing=%s)", bool(tracer_provider))
 
    def _convert_langchain_messages_to_quraite_messages(
        self,
        messages: list[LangchainMessage],
    ) -> list[AgentMessage]:
+        """Convert LangChain messages to Quraite format."""
        logger.debug(
            "Converting %d langchain messages to quraite format", len(messages)
        )
@@ -111,7 +65,7 @@ class LangchainAdapter(BaseAdapter):
                logger.debug("Converting SystemMessage at index %d", idx)
                converted_messages.append(
                    QuraiteSystemMessage(
-                        content=[MessageContentText(type="text", text=msg.content)]
+                        content=[MessageContentText(type="text", text=msg.content or "")]
                    )
                )
 
@@ -119,7 +73,7 @@ class LangchainAdapter(BaseAdapter):
                logger.debug("Converting HumanMessage at index %d", idx)
                converted_messages.append(
                    UserMessage(
-                        content=[MessageContentText(type="text", text=msg.content)]
+                        content=[MessageContentText(type="text", text=msg.content or "")]
                    )
                )
 
@@ -132,8 +86,8 @@ class LangchainAdapter(BaseAdapter):
                text_content, tool_calls = self._extract_ai_message_content(msg)
                converted_messages.append(
                    AssistantMessage(
-                        content=text_content if text_content else None,
-                        tool_calls=tool_calls if tool_calls else None,
+                        content=text_content,
+                        tool_calls=tool_calls,
                    )
                )
 
@@ -167,6 +121,7 @@ class LangchainAdapter(BaseAdapter):
    def _extract_ai_message_content(
        self, msg: AIMessage
    ) -> tuple[list[MessageContentText], list[ToolCall]]:
+        """Extract text and tool calls from AIMessage."""
        text_content = []
 
        if msg.content:
@@ -175,7 +130,7 @@ class LangchainAdapter(BaseAdapter):
                    text_content.append(MessageContentText(type="text", text=text))
                case list():
                    text_content.extend(
-                        MessageContentText(type="text", text=content.get("text"))
+                        MessageContentText(type="text", text=content.get("text", ""))
                        for content in msg.content
                        if isinstance(content, dict) and content.get("type") == "text"
                    )
@@ -185,57 +140,35 @@ class LangchainAdapter(BaseAdapter):
            logger.debug("Extracting %d tool calls from AIMessage", len(msg.tool_calls))
            tool_calls.extend(
                ToolCall(
-                    id=tool_call.get("id"),  # type: ignore[union-attr]
-                    name=tool_call.get("name"),  # type: ignore[union-attr]
-                    arguments=tool_call.get("args"),  # type: ignore[union-attr]
+                    id=tool_call.get("id", ""),
+                    name=tool_call.get("name", ""),
+                    arguments=tool_call.get("args", {}),
                )
                for tool_call in msg.tool_calls
            )
 
        return text_content, tool_calls
 
-    async def ainvoke(
-        self,
-        input: list[AgentMessage],
-        session_id: str | None,
-    ) -> AgentInvocationResponse:
-        """
-        Asynchronous invocation method - invokes the LangChain agent and converts the output to List[AgentMessage]
-
-        Args:
-            input: List[AgentMessage] containing user_message
-            session_id: Optional conversation ID for maintaining context
+    async def ainvoke(self, input: InvokeInput) -> InvokeOutput:
+        """Invoke LangChain agent and return response."""
+        logger.info("ainvoke called (session_id=%s)", input.session_id)
 
-        Returns:
-            List[AgentMessage] or AgentTrace - converted messages from the agent's response or trace
-        """
-        logger.info(
-            "ainvoke called (session_id=%s, input_messages=%d)", session_id, len(input)
+        agent_input = {"messages": [HumanMessage(content=input.user_message_str())]}
+        config: RunnableConfig = (
+            {"configurable": {"thread_id": input.session_id}}
+            if input.session_id
+            else {}
        )
 
-        try:
-            agent_input = self._prepare_input(input)
-            config = {"configurable": {"thread_id": session_id}} if session_id else {}
-
-            if self.tracer_provider:
-                logger.debug("Invoking with tracing enabled")
-                return await self._ainvoke_with_tracing(agent_input, config)
-
-            logger.debug("Invoking without tracing")
-            return await self._ainvoke_without_tracing(agent_input, config)
-
-        except ValueError:
-            logger.exception("ValueError during ainvoke")
-            raise
-        except Exception:
-            logger.exception("Unexpected error during ainvoke")
-            raise
+        if self.tracer_provider:
+            return await self._ainvoke_with_tracing(agent_input, config)
+        return await self._ainvoke_without_tracing(agent_input, config)
 
    async def _ainvoke_with_tracing(
        self,
        agent_input: dict[str, list[HumanMessage]],
-        config: dict,
-    ) -> AgentInvocationResponse:
+        config: RunnableConfig,
+    ) -> InvokeOutput:
        """Execute ainvoke with tracing enabled."""
        with self.tracer.start_as_current_span("langchain_invocation") as span:
            trace_id = span.get_span_context().trace_id
@@ -245,7 +178,7 @@ class LangchainAdapter(BaseAdapter):
            )
            _ = await self.agent_graph.ainvoke(agent_input, config=config)
 
-            trace_readable_spans = self.quraite_span_exporter.get_spans_by_trace_id(
+            trace_readable_spans: list[ReadableSpan] = self.quraite_span_exporter.get_spans_by_trace_id(
                trace_id
            )
 
@@ -261,19 +194,19 @@ class LangchainAdapter(BaseAdapter):
                trajectory = agent_trace.to_agent_trajectory(framework=Framework.LANGCHAIN)
                logger.debug("Generated trajectory with %d messages", len(trajectory))
 
-                return AgentInvocationResponse(
+                return InvokeOutput(
                    agent_trace=agent_trace,
                    agent_trajectory=trajectory,
                )
 
            logger.warning("No trace spans found for trace_id=%s", trace_id)
-            return AgentInvocationResponse()
+            return InvokeOutput()
 
    async def _ainvoke_without_tracing(
        self,
        agent_input: dict[str, list[HumanMessage]],
-        config: dict,
-    ) -> AgentInvocationResponse:
+        config: RunnableConfig,
+    ) -> InvokeOutput:
        """Execute ainvoke without tracing."""
        logger.debug("Starting non-traced invocation")
        agent_messages = []
@@ -296,10 +229,10 @@ class LangchainAdapter(BaseAdapter):
                agent_messages
            )
 
-            return AgentInvocationResponse(
+            return InvokeOutput(
                agent_trajectory=agent_trajectory,
            )
 
        except ValueError:
            logger.exception("Error converting messages to List[AgentMessage]")
-            return AgentInvocationResponse()
+            return InvokeOutput()
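For callers, the constructor change above makes tracer_provider keyword-only and typed against the OpenTelemetry SDK TracerProvider. A construction sketch under those assumptions; the compiled graph and the setup_tracing() helper from the new quraite/tracing/setup.py are outside this diff:

from langgraph.graph.state import CompiledStateGraph

from quraite.adapters.langchain_adapter import LangchainAdapter


def build_adapter(graph: CompiledStateGraph) -> LangchainAdapter:
    # Without a tracer_provider, ainvoke() takes the non-traced path and the
    # returned InvokeOutput carries only agent_trajectory (no agent_trace).
    return LangchainAdapter(agent_graph=graph, agent_name="LangChain Agent")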
quraite/adapters/langchain_server_adapter.py
@@ -1,23 +1,21 @@
 from __future__ import annotations
 
-from typing import Annotated, Any, List, Optional, Union
-
-from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage
+from langchain_core.messages import HumanMessage, ToolMessage
 from langgraph.pregel.remote import RemoteGraph
 from langgraph_sdk import get_client, get_sync_client
-from pydantic import Discriminator
 
 from quraite.adapters.base import BaseAdapter
 from quraite.logger import get_logger
-from quraite.schema.message import AgentMessage, AssistantMessage, MessageContentText
+from quraite.schema.invoke import InvokeInput, InvokeOutput
+from quraite.schema.message import (
+    AgentMessage,
+    AssistantMessage,
+    MessageContentText,
+    ToolCall,
+    ToolMessage,
+    UserMessage,
+)
 from quraite.schema.message import SystemMessage as QuraiteSystemMessage
-from quraite.schema.message import ToolCall, ToolMessage, UserMessage
-from quraite.schema.response import AgentInvocationResponse
-
-LangchainMessage = Annotated[
-    Union[HumanMessage, SystemMessage, AIMessage, ToolMessage],
-    Discriminator(discriminator="type"),
-]
 
 logger = get_logger(__name__)
 
@@ -40,8 +38,8 @@ class LangchainServerAdapter(BaseAdapter):
        self,
        *,
        base_url: str,
-        assistant_id: Optional[str] = None,
-        graph_name: Optional[str] = None,
+        assistant_id: str | None = None,
+        graph_name: str | None = None,
        **kwargs,
    ) -> None:
        self.base_url = base_url
@@ -81,51 +79,14 @@ class LangchainServerAdapter(BaseAdapter):
            self.graph_name,
        )
 
-    def _prepare_input(self, input: List[AgentMessage]) -> Any:
-        """
-        Prepare input for LangChain agent from List[AgentMessage].
-
-        Args:
-            input: List[AgentMessage] containing user_message
-
-        Returns:
-            Dict with messages list containing user_message
-        """
-        logger.debug("Preparing Langchain server input from %d messages", len(input))
-        if not input or input[-1].role != "user":
-            logger.error("Langchain server input missing user message")
-            raise ValueError("No user message found in the input")
-
-        last_user_message = input[-1]
-        # Check if content list is not empty and has text
-        if not last_user_message.content:
-            logger.error("Langchain server user message missing content")
-            raise ValueError("User message has no content")
-
-        # Find the first text content item
-        text_content = None
-        for content_item in last_user_message.content:
-            if content_item.type == "text" and content_item.text:
-                text_content = content_item.text
-                break
-
-        if not text_content:
-            logger.error("Langchain server user message missing text content")
-            raise ValueError("No text content found in user message")
-
-        logger.debug(
-            "Prepared Langchain server input (text_length=%d)", len(text_content)
-        )
-        return {"messages": [HumanMessage(content=text_content).model_dump()]}
-
    def _convert_langchain_messages_to_quraite_messages(
        self,
-        messages: List[dict],
-    ) -> List[AgentMessage]:
+        messages: list[dict],
+    ) -> list[AgentMessage]:
        logger.debug(
            "Converting %d Langchain server messages to quraite format", len(messages)
        )
-        converted_messages: List[AgentMessage] = []
+        converted_messages: list[AgentMessage] = []
 
        for msg in messages:
            if msg.get("type") == "system":
@@ -147,8 +108,8 @@ class LangchainServerAdapter(BaseAdapter):
                )
 
            elif msg.get("type") == "ai":
-                text_content: List[MessageContentText] = []
-                tool_calls_list: List[ToolCall] = []
+                text_content: list[MessageContentText] = []
+                tool_calls_list: list[ToolCall] = []
 
                # Extract text content - sometimes it's a string, sometimes a list of dicts
                content = msg.get("content")
@@ -186,12 +147,12 @@ class LangchainServerAdapter(BaseAdapter):
                )
 
            elif msg.get("type") == "tool":
-                tool_content = msg.get("content", "")
+                tool_content = msg.get("content", "") or ""
                converted_messages.append(
                    ToolMessage(
                        tool_call_id=msg.get("tool_call_id", ""),
                        content=[
-                            MessageContentText(type="text", text=str(tool_content))
+                            MessageContentText(type="text", text=str(tool_content or ""))
                        ],
                    )
                )
@@ -206,47 +167,34 @@ class LangchainServerAdapter(BaseAdapter):
        )
        return converted_messages
 
-    async def ainvoke(
-        self,
-        input: List[AgentMessage],
-        session_id: Annotated[Union[str, None], "Thread ID used by LangChain API"],
-    ) -> AgentInvocationResponse:
-        agent_messages = []
-        agent_input = self._prepare_input(input)
-        if session_id:
-            config = {"configurable": {"thread_id": session_id}}
-        else:
-            config = {}
+    async def ainvoke(self, input: InvokeInput) -> InvokeOutput:
+        """Invoke LangChain server agent and return response."""
+        logger.info("Langchain server ainvoke called (session_id=%s)", input.session_id)
+
+        agent_input = {
+            "messages": [HumanMessage(content=input.user_message_str()).model_dump()]
+        }
+        config = (
+            {"configurable": {"thread_id": input.session_id}}
+            if input.session_id
+            else {}
+        )
 
        try:
-            logger.info("Langchain server ainvoke called (session_id=%s)", session_id)
+            agent_messages = []
            async for event in self.remote_graph.astream(agent_input, config=config):
                for _, result in event.items():
                    if result.get("messages"):
-                        logger.debug(
-                            "Langchain server received %d messages from stream chunk",
-                            len(result.get("messages")),
-                        )
                        agent_messages += result.get("messages")
 
-        except Exception as e:
-            logger.exception("Error invoking Langchain remote graph")
-            raise RuntimeError(f"Error invoking LangChain agent: {e}") from e
-
-        try:
-            # Convert to List[AgentMessage]
            agent_trajectory = self._convert_langchain_messages_to_quraite_messages(
                agent_messages
            )
            logger.info(
-                "Langchain server ainvoke produced %d trajectory messages",
+                "Langchain server produced %d trajectory messages",
                len(agent_trajectory),
            )
-
-            return AgentInvocationResponse(
-                agent_trajectory=agent_trajectory,
-            )
-
-        except ValueError:
-            logger.exception("Langchain server conversion to AgentMessage failed")
-            return AgentInvocationResponse()
+            return InvokeOutput(agent_trajectory=agent_trajectory)
+        except Exception as e:
+            logger.exception("Error invoking Langchain remote graph")
+            raise RuntimeError(f"Error invoking LangChain agent: {e}") from e
quraite/adapters/langflow_adapter.py
@@ -1,12 +1,13 @@
 import asyncio
 import json
 import uuid
-from typing import Any, Dict, List, Union
+from typing import Any
 
 import aiohttp
 
 from quraite.adapters.base import BaseAdapter
 from quraite.logger import get_logger
+from quraite.schema.invoke import InvokeInput, InvokeOutput
 from quraite.schema.message import (
    AgentMessage,
    AssistantMessage,
@@ -14,7 +15,6 @@ from quraite.schema.message import (
    ToolCall,
    ToolMessage,
 )
-from quraite.schema.response import AgentInvocationResponse
 
 logger = get_logger(__name__)
 
@@ -33,8 +33,8 @@ class LangflowAdapter(BaseAdapter):
 
    def _convert_api_output_to_messages(
        self,
-        response: Dict[str, Any],
-    ) -> List[AgentMessage]:
+        response: dict[str, Any],
+    ) -> list[AgentMessage]:
        logger.debug(
            "Converting Langflow response (root_keys=%s)",
            list(response.keys()),
@@ -45,7 +45,7 @@ class LangflowAdapter(BaseAdapter):
        contents = content_blocks[0]["contents"]
 
        # Assume everything sequential.
-        ai_trajectory: List[AgentMessage] = []
+        ai_trajectory: list[AgentMessage] = []
        for step in contents:
            if step["type"] == "text":
                if step["header"]["title"] == "Input":
@@ -94,35 +94,11 @@ class LangflowAdapter(BaseAdapter):
            )
        return ai_trajectory
 
-    def _prepare_input(self, input: List[AgentMessage]) -> str:
-        logger.debug("Preparing Langflow input from %d messages", len(input))
-        if not input or input[-1].role != "user":
-            logger.error("Langflow input missing user message")
-            raise ValueError("No user message found in the input")
-
-        last_user_message = input[-1]
-        if not last_user_message.content:
-            logger.error("Langflow input user message missing content")
-            raise ValueError("User message has no content")
-
-        text_content = None
-        for content_item in last_user_message.content:
-            if content_item.type == "text" and content_item.text:
-                text_content = content_item.text
-                break
-
-        if not text_content:
-            logger.error("Langflow input missing text content")
-            raise ValueError("No text content found in user message")
-
-        logger.debug("Prepared Langflow input (text_length=%d)", len(text_content))
-        return text_content
-
    async def _aapi_call(
        self,
        query: str,
        sessionId: str,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
        payload = {
            "output_type": "chat",
            "input_type": "chat",
@@ -152,41 +128,20 @@ class LangflowAdapter(BaseAdapter):
            logger.exception("Langflow API response decoding failed")
            raise ValueError(f"Failed to decode JSON response: {e}") from e
 
-    async def ainvoke(
-        self,
-        input: List[AgentMessage],
-        session_id: Union[str, None],
-    ) -> AgentInvocationResponse:
-        """Asynchronous invocation method - invokes the Langflow agent and converts to List[AgentMessage]."""
-        logger.info(
-            "Langflow ainvoke called (session_id=%s, input_messages=%d)",
-            session_id,
-            len(input),
-        )
-        agent_input = self._prepare_input(input)
+    async def ainvoke(self, input: InvokeInput) -> InvokeOutput:
+        """Invoke Langflow agent and return response."""
+        logger.info("Langflow ainvoke called (session_id=%s)", input.session_id)
 
        try:
            agent_output = await self._aapi_call(
-                query=agent_input,
-                sessionId=session_id if session_id else uuid.uuid4(),
+                query=input.user_message_str(),
+                sessionId=input.session_id if input.session_id else str(uuid.uuid4()),
            )
-            logger.debug(
-                "Langflow API returned payload with top-level keys: %s",
-                list(agent_output.keys()),
-            )
-        except Exception as e:
-            logger.exception("Error calling Langflow endpoint")
-            raise RuntimeError(f"Error calling langflow endpoint: {e}") from e
-
-        try:
            agent_trajectory = self._convert_api_output_to_messages(agent_output)
            logger.info(
-                "Langflow conversion produced %d trajectory messages",
-                len(agent_trajectory),
-            )
-            return AgentInvocationResponse(
-                agent_trajectory=agent_trajectory,
+                "Langflow produced %d trajectory messages", len(agent_trajectory)
            )
+            return InvokeOutput(agent_trajectory=agent_trajectory)
        except Exception as e:
-            logger.exception("Error processing Langflow response")
-            raise RuntimeError(f"Error processing langflow response: {e}") from e
+            logger.exception("Error calling Langflow endpoint")
+            raise RuntimeError(f"Error calling Langflow endpoint: {e}") from e
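Taken together, the three adapters above now share one async contract: ainvoke(InvokeInput) -> InvokeOutput, with InvokeInput exposing session_id and user_message_str(). A toy sketch of a custom adapter against that contract, assuming BaseAdapter requires nothing beyond ainvoke (its definition is not part of this diff):

from quraite.adapters.base import BaseAdapter
from quraite.schema.invoke import InvokeInput, InvokeOutput
from quraite.schema.message import AssistantMessage, MessageContentText


class EchoAdapter(BaseAdapter):
    """Toy adapter that echoes the user message back as a one-step trajectory."""

    async def ainvoke(self, input: InvokeInput) -> InvokeOutput:
        # Only attributes shown in the hunks above are used here.
        reply = AssistantMessage(
            content=[
                MessageContentText(type="text", text=f"echo: {input.user_message_str()}")
            ]
        )
        return InvokeOutput(agent_trajectory=[reply])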