quraite-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. quraite/__init__.py +3 -0
  2. quraite/adapters/__init__.py +134 -0
  3. quraite/adapters/agno_adapter.py +159 -0
  4. quraite/adapters/base.py +123 -0
  5. quraite/adapters/bedrock_agents_adapter.py +343 -0
  6. quraite/adapters/flowise_adapter.py +275 -0
  7. quraite/adapters/google_adk_adapter.py +209 -0
  8. quraite/adapters/http_adapter.py +239 -0
  9. quraite/adapters/langflow_adapter.py +192 -0
  10. quraite/adapters/langgraph_adapter.py +304 -0
  11. quraite/adapters/langgraph_server_adapter.py +252 -0
  12. quraite/adapters/n8n_adapter.py +220 -0
  13. quraite/adapters/openai_agents_adapter.py +269 -0
  14. quraite/adapters/pydantic_ai_adapter.py +312 -0
  15. quraite/adapters/smolagents_adapter.py +152 -0
  16. quraite/logger.py +62 -0
  17. quraite/schema/__init__.py +0 -0
  18. quraite/schema/message.py +54 -0
  19. quraite/schema/response.py +16 -0
  20. quraite/serve/__init__.py +1 -0
  21. quraite/serve/cloudflared.py +210 -0
  22. quraite/serve/local_agent.py +360 -0
  23. quraite/traces/traces_adk_openinference.json +379 -0
  24. quraite/traces/traces_agno_multi_agent.json +669 -0
  25. quraite/traces/traces_agno_openinference.json +321 -0
  26. quraite/traces/traces_crewai_openinference.json +155 -0
  27. quraite/traces/traces_langgraph_openinference.json +349 -0
  28. quraite/traces/traces_langgraph_openinference_multi_agent.json +2705 -0
  29. quraite/traces/traces_langgraph_traceloop.json +510 -0
  30. quraite/traces/traces_openai_agents_multi_agent_1.json +402 -0
  31. quraite/traces/traces_openai_agents_openinference.json +341 -0
  32. quraite/traces/traces_pydantic_openinference.json +286 -0
  33. quraite/traces/traces_pydantic_openinference_multi_agent_1.json +399 -0
  34. quraite/traces/traces_pydantic_openinference_multi_agent_2.json +398 -0
  35. quraite/traces/traces_smol_agents_openinference.json +397 -0
  36. quraite/traces/traces_smol_agents_tool_calling_openinference.json +704 -0
  37. quraite/tracing/__init__.py +24 -0
  38. quraite/tracing/constants.py +16 -0
  39. quraite/tracing/span_exporter.py +115 -0
  40. quraite/tracing/span_processor.py +49 -0
  41. quraite/tracing/tool_extractors.py +290 -0
  42. quraite/tracing/trace.py +494 -0
  43. quraite/tracing/types.py +179 -0
  44. quraite/tracing/utils.py +170 -0
  45. quraite/utils/__init__.py +0 -0
  46. quraite/utils/json_utils.py +269 -0
  47. quraite-0.0.1.dist-info/METADATA +44 -0
  48. quraite-0.0.1.dist-info/RECORD +49 -0
  49. quraite-0.0.1.dist-info/WHEEL +4 -0
quraite/adapters/langgraph_adapter.py
@@ -0,0 +1,304 @@
+ import json
+ import uuid
+ from typing import Optional
+
+ from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage
+ from langgraph.graph.state import CompiledStateGraph
+ from opentelemetry.trace import TracerProvider
+
+ from quraite.adapters.base import BaseAdapter
+ from quraite.logger import get_logger
+ from quraite.schema.message import AgentMessage, AssistantMessage, MessageContentText
+ from quraite.schema.message import SystemMessage as QuraiteSystemMessage
+ from quraite.schema.message import ToolCall
+ from quraite.schema.message import ToolMessage as QuraiteToolMessage
+ from quraite.schema.message import UserMessage
+ from quraite.schema.response import AgentInvocationResponse
+ from quraite.tracing.constants import QURAITE_ADAPTER_TRACE_PREFIX, Framework
+ from quraite.tracing.trace import AgentSpan, AgentTrace
+
+ LangchainMessage = HumanMessage | SystemMessage | AIMessage | ToolMessage
+
+ logger = get_logger(__name__)
+
+
+ class LanggraphAdapter(BaseAdapter):
+     """
+     LangGraph adapter wrapper that converts any LangGraph agent
+     to a standardized callable interface (invoke) and converts the output to List[AgentMessage].
+
+     This class wraps any LangGraph CompiledGraph and provides:
+     - Synchronous invocation via invoke()
+     - Asynchronous invocation via ainvoke()
+     - Automatic conversion to List[AgentMessage] format
+     """
+
+     def __init__(
+         self,
+         agent_graph: CompiledStateGraph,
+         agent_name: str = "LangGraph Agent",
+         tracer_provider: Optional[TracerProvider] = None,
+     ):
+         """
+         Initialize with a pre-configured LangGraph agent.
+
+         Args:
+             agent_graph: Any CompiledGraph from LangGraph (must have invoke/ainvoke methods)
+             agent_name: Name of the agent for trajectory metadata
+             tracer_provider: Optional TracerProvider; when supplied, invocations are traced
+         """
+         logger.debug("Initializing LanggraphAdapter with agent_name=%s", agent_name)
+         self.agent_graph = agent_graph
+         self.agent_name = agent_name
+         self._init_tracing(tracer_provider, required=False)
+         logger.info(
+             "LanggraphAdapter initialized successfully (tracing_enabled=%s)",
+             bool(tracer_provider),
+         )
+
+     def _prepare_input(
+         self, input: list[AgentMessage]
+     ) -> dict[str, list[HumanMessage]]:
+         """
+         Prepare input for the LangGraph agent from List[AgentMessage].
+
+         Args:
+             input: List[AgentMessage] whose last entry must be a user message
+
+         Returns:
+             Dictionary with a 'messages' key containing the prepared HumanMessage
+         """
+         logger.debug("Preparing input from %d messages", len(input))
+
+         if not input or input[-1].role != "user":
+             logger.error(
+                 "Invalid input: no user message found (input_length=%d)", len(input)
+             )
+             raise ValueError("No user message found in the input")
+
+         last_user_message = input[-1]
+
+         if not last_user_message.content:
+             logger.error("User message has no content")
+             raise ValueError("User message has no content")
+
+         text_content = next(
+             (
+                 content_item.text
+                 for content_item in last_user_message.content
+                 if content_item.type == "text" and content_item.text
+             ),
+             None,
+         )
+
+         if not text_content:
+             logger.error("No text content found in user message")
+             raise ValueError("No text content found in user message")
+
+         logger.debug("Prepared input with text_content length=%d", len(text_content))
+         return {"messages": [HumanMessage(content=text_content)]}
+
+     def _convert_langchain_messages_to_quraite_messages(
+         self,
+         messages: list[LangchainMessage],
+     ) -> list[AgentMessage]:
+         logger.debug(
+             "Converting %d langchain messages to quraite format", len(messages)
+         )
+         converted_messages: list[AgentMessage] = []
+
+         for idx, msg in enumerate(messages):
+             match msg:
+                 case SystemMessage():
+                     logger.debug("Converting SystemMessage at index %d", idx)
+                     converted_messages.append(
+                         QuraiteSystemMessage(
+                             content=[MessageContentText(type="text", text=msg.content)]
+                         )
+                     )
+
+                 case HumanMessage():
+                     logger.debug("Converting HumanMessage at index %d", idx)
+                     converted_messages.append(
+                         UserMessage(
+                             content=[MessageContentText(type="text", text=msg.content)]
+                         )
+                     )
+
+                 case AIMessage():
+                     logger.debug(
+                         "Converting AIMessage at index %d (has_tool_calls=%s)",
+                         idx,
+                         bool(msg.tool_calls),
+                     )
+                     text_content, tool_calls = self._extract_ai_message_content(msg)
+                     converted_messages.append(
+                         AssistantMessage(
+                             content=text_content if text_content else None,
+                             tool_calls=tool_calls if tool_calls else None,
+                         )
+                     )
+
+                 case ToolMessage():
+                     logger.debug(
+                         "Converting ToolMessage at index %d (tool_call_id=%s)",
+                         idx,
+                         msg.tool_call_id,
+                     )
+                     if not msg.content:
+                         tool_message_content = ""
+                     elif isinstance(msg.content, str):
+                         tool_message_content = msg.content
+                     else:
+                         tool_message_content = json.dumps(msg.content)
+
+                     converted_messages.append(
+                         QuraiteToolMessage(
+                             tool_call_id=msg.tool_call_id,
+                             content=[
+                                 MessageContentText(
+                                     type="text", text=tool_message_content
+                                 )
+                             ],
+                         )
+                     )
+
+         logger.info("Converted %d messages successfully", len(converted_messages))
+         return converted_messages
+
+     def _extract_ai_message_content(
+         self, msg: AIMessage
+     ) -> tuple[list[MessageContentText], list[ToolCall]]:
+         text_content = []
+
+         if msg.content:
+             match msg.content:
+                 case str(text):
+                     text_content.append(MessageContentText(type="text", text=text))
+                 case list():
+                     text_content.extend(
+                         MessageContentText(type="text", text=content.get("text"))
+                         for content in msg.content
+                         if isinstance(content, dict) and content.get("type") == "text"
+                     )
+
+         tool_calls = []
+         if msg.tool_calls:
+             logger.debug("Extracting %d tool calls from AIMessage", len(msg.tool_calls))
+             tool_calls.extend(
+                 ToolCall(
+                     id=tool_call.get("id"),  # type: ignore[union-attr]
+                     name=tool_call.get("name"),  # type: ignore[union-attr]
+                     arguments=tool_call.get("args"),  # type: ignore[union-attr]
+                 )
+                 for tool_call in msg.tool_calls
+             )
+
+         return text_content, tool_calls
+
+     async def ainvoke(
+         self,
+         input: list[AgentMessage],
+         session_id: str | None,
+     ) -> AgentInvocationResponse:
+         """
+         Asynchronous invocation - invokes the LangGraph agent and converts the output to List[AgentMessage].
+
+         Args:
+             input: List[AgentMessage] containing the user message
+             session_id: Optional conversation ID for maintaining context
+
+         Returns:
+             AgentInvocationResponse holding the converted trajectory and, when tracing is enabled, the agent trace
+         """
+         logger.info(
+             "ainvoke called (session_id=%s, input_messages=%d)", session_id, len(input)
+         )
+
+         try:
+             agent_input = self._prepare_input(input)
+             config = {"configurable": {"thread_id": session_id}} if session_id else {}
+
+             if self.tracer_provider:
+                 logger.debug("Invoking with tracing enabled")
+                 return await self._ainvoke_with_tracing(agent_input, config)
+
+             logger.debug("Invoking without tracing")
+             return await self._ainvoke_without_tracing(agent_input, config)
+
+         except ValueError:
+             logger.exception("ValueError during ainvoke")
+             raise
+         except Exception:
+             logger.exception("Unexpected error during ainvoke")
+             raise
+
+     async def _ainvoke_with_tracing(
+         self,
+         agent_input: dict[str, list[HumanMessage]],
+         config: dict,
+     ) -> AgentInvocationResponse:
+         """Execute ainvoke with tracing enabled."""
+         adapter_trace_id = f"{QURAITE_ADAPTER_TRACE_PREFIX}-{uuid.uuid4()}"
+         logger.debug("Starting traced invocation (trace_id=%s)", adapter_trace_id)
+
+         with self.tracer.start_as_current_span(name=adapter_trace_id):
+             result = await self.agent_graph.ainvoke(agent_input, config=config)
+
+         trace_readable_spans = self.quraite_span_exporter.get_trace_by_testcase(
+             adapter_trace_id
+         )
+
+         if trace_readable_spans:
+             logger.info("Retrieved %d spans from trace", len(trace_readable_spans))
+             agent_trace = AgentTrace(
+                 spans=[
+                     AgentSpan.from_readable_oi_span(span)
+                     for span in trace_readable_spans
+                 ]
+             )
+
+             trajectory = agent_trace.to_agent_trajectory(framework=Framework.LANGGRAPH)
+             logger.debug("Generated trajectory with %d messages", len(trajectory))
+
+             return AgentInvocationResponse(
+                 agent_trace=agent_trace,
+                 agent_trajectory=trajectory,
+             )
+
+         logger.warning("No trace spans found for trace_id=%s", adapter_trace_id)
+         return AgentInvocationResponse()
+
+     async def _ainvoke_without_tracing(
+         self,
+         agent_input: dict[str, list[HumanMessage]],
+         config: dict,
+     ) -> AgentInvocationResponse:
+         """Execute ainvoke without tracing."""
+         logger.debug("Starting non-traced invocation")
+         agent_messages = []
+
+         try:
+             async for event in self.agent_graph.astream(agent_input, config=config):
+                 logger.debug(
+                     "Received stream event with %d values", len(event.values())
+                 )
+                 for result in event.values():
+                     if messages := result.get("messages"):
+                         logger.debug("Processing %d messages from event", len(messages))
+                         agent_messages.extend(messages)
+
+             logger.info(
+                 "Streaming complete, received %d total messages", len(agent_messages)
+             )
+
+             agent_trajectory = self._convert_langchain_messages_to_quraite_messages(
+                 agent_messages
+             )
+
+             return AgentInvocationResponse(
+                 agent_trajectory=agent_trajectory,
+             )
+
+         except ValueError:
+             logger.exception("Error converting messages to List[AgentMessage]")
+             return AgentInvocationResponse()
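
A minimal usage sketch for the adapter above (illustrative only, not part of the packaged file). It assumes UserMessage defaults its role to "user", and build_graph() is a hypothetical helper returning any compiled LangGraph graph:

import asyncio

from quraite.adapters.langgraph_adapter import LanggraphAdapter
from quraite.schema.message import MessageContentText, UserMessage

graph = build_graph()  # hypothetical helper: any CompiledStateGraph

adapter = LanggraphAdapter(agent_graph=graph, agent_name="Demo Agent")

async def main():
    # The last input message must be a user message with at least one text content item.
    response = await adapter.ainvoke(
        input=[UserMessage(content=[MessageContentText(type="text", text="What is 2 + 2?")])],
        session_id="demo-thread-1",
    )
    # Without a tracer_provider the adapter streams the graph and returns only the
    # converted trajectory; with one it also populates agent_trace.
    print(response.agent_trajectory)

asyncio.run(main())
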
quraite/adapters/langgraph_server_adapter.py
@@ -0,0 +1,252 @@
+ from __future__ import annotations
+
+ from typing import Annotated, Any, List, Optional, Union
+
+ from langchain_core.messages import AIMessage, HumanMessage, SystemMessage, ToolMessage
+ from langgraph.pregel.remote import RemoteGraph
+ from langgraph_sdk import get_client, get_sync_client
+ from pydantic import Discriminator
+
+ from quraite.adapters.base import BaseAdapter
+ from quraite.logger import get_logger
+ from quraite.schema.message import AgentMessage, AssistantMessage, MessageContentText
+ from quraite.schema.message import SystemMessage as QuraiteSystemMessage
+ from quraite.schema.message import ToolCall
+ from quraite.schema.message import ToolMessage as QuraiteToolMessage
+ from quraite.schema.message import UserMessage
+ from quraite.schema.response import AgentInvocationResponse
+
+ LangchainMessage = Annotated[
+     Union[HumanMessage, SystemMessage, AIMessage, ToolMessage],
+     Discriminator(discriminator="type"),
+ ]
+
+ logger = get_logger(__name__)
+
+
+ class LanggraphServerAdapter(BaseAdapter):
+     """Remote LangGraph server adapter based on langgraph-sdk.
+
+     Args:
+         base_url: The base URL of the LangGraph server
+         assistant_id: The ID of the assistant to invoke
+         graph_name: Name of the deployed graph to invoke when no assistant_id is given
+         **kwargs: Additional keyword arguments passed directly to
+             langgraph_sdk.get_client() and get_sync_client().
+             Common options include:
+             - api_key: API key for authentication
+             - headers: Additional HTTP headers
+             - timeout: Request timeout configuration
+     """
+
+     def __init__(
+         self,
+         *,
+         base_url: str,
+         assistant_id: Optional[str] = None,
+         graph_name: Optional[str] = None,
+         **kwargs,
+     ) -> None:
+         self.base_url = base_url
+         self.assistant_id = assistant_id
+         self.graph_name = graph_name
+
+         logger.debug(
+             "Initializing LanggraphServerAdapter (base_url=%s, assistant_id=%s, graph_name=%s)",
+             base_url,
+             assistant_id,
+             graph_name,
+         )
+         try:
+             sync_client = get_sync_client(url=self.base_url, **kwargs)
+             async_client = get_client(url=self.base_url, **kwargs)
+             if self.assistant_id:
+                 self.remote_graph = RemoteGraph(
+                     self.assistant_id,
+                     url=self.base_url,
+                     sync_client=sync_client,
+                     client=async_client,
+                 )
+             else:
+                 self.remote_graph = RemoteGraph(
+                     self.graph_name,
+                     url=self.base_url,
+                     sync_client=sync_client,
+                     client=async_client,
+                 )
+         except Exception as exc:
+             raise RuntimeError(
+                 f"Failed to initialize LangGraph RemoteGraph for {self.base_url}: {exc}"
+             ) from exc
+         logger.info(
+             "LanggraphServerAdapter initialized (assistant_id=%s, graph_name=%s)",
+             self.assistant_id,
+             self.graph_name,
+         )
+
+     def _prepare_input(self, input: List[AgentMessage]) -> Any:
+         """
+         Prepare input for the LangGraph agent from List[AgentMessage].
+
+         Args:
+             input: List[AgentMessage] containing the user message
+
+         Returns:
+             Dict with a 'messages' list containing the serialized user message
+         """
+         logger.debug("Preparing Langgraph server input from %d messages", len(input))
+         if not input or input[-1].role != "user":
+             logger.error("Langgraph server input missing user message")
+             raise ValueError("No user message found in the input")
+
+         last_user_message = input[-1]
+         # Check if content list is not empty and has text
+         if not last_user_message.content:
+             logger.error("Langgraph server user message missing content")
+             raise ValueError("User message has no content")
+
+         # Find the first text content item
+         text_content = None
+         for content_item in last_user_message.content:
+             if content_item.type == "text" and content_item.text:
+                 text_content = content_item.text
+                 break
+
+         if not text_content:
+             logger.error("Langgraph server user message missing text content")
+             raise ValueError("No text content found in user message")
+
+         logger.debug(
+             "Prepared Langgraph server input (text_length=%d)", len(text_content)
+         )
+         return {"messages": [HumanMessage(content=text_content).model_dump()]}
+
+     def _convert_langchain_messages_to_quraite_messages(
+         self,
+         messages: List[dict],
+     ) -> List[AgentMessage]:
+         logger.debug(
+             "Converting %d Langgraph server messages to quraite format", len(messages)
+         )
+         converted_messages: List[AgentMessage] = []
+
+         for msg in messages:
+             if msg.get("type") == "system":
+                 converted_messages.append(
+                     QuraiteSystemMessage(
+                         content=[
+                             MessageContentText(type="text", text=msg.get("content", ""))
+                         ],
+                     )
+                 )
+
+             elif msg.get("type") == "human":
+                 converted_messages.append(
+                     UserMessage(
+                         content=[
+                             MessageContentText(type="text", text=msg.get("content", ""))
+                         ],
+                     )
+                 )
+
+             elif msg.get("type") == "ai":
+                 text_content: List[MessageContentText] = []
+                 tool_calls_list: List[ToolCall] = []
+
+                 # Extract text content - sometimes it's a string, sometimes a list of dicts
+                 content = msg.get("content")
+                 if isinstance(content, str) and content:
+                     text_content.append(MessageContentText(type="text", text=content))
+                 elif isinstance(content, list):
+                     for content_item in content:
+                         if isinstance(content_item, dict):
+                             if content_item.get("type") == "text" and content_item.get(
+                                 "text"
+                             ):
+                                 text_content.append(
+                                     MessageContentText(
+                                         type="text", text=content_item.get("text")
+                                     )
+                                 )
+
+                 # Extract tool calls if present
+                 if msg.get("tool_calls"):
+                     for tool_call in msg.get("tool_calls"):
+                         if isinstance(tool_call, dict):
+                             tool_calls_list.append(
+                                 ToolCall(
+                                     id=tool_call.get("id", ""),
+                                     name=tool_call.get("name", ""),
+                                     arguments=tool_call.get("args", {}),
+                                 )
+                             )
+
+                 converted_messages.append(
+                     AssistantMessage(
+                         content=text_content if text_content else None,
+                         tool_calls=tool_calls_list if tool_calls_list else None,
+                     )
+                 )
+
+             elif msg.get("type") == "tool":
+                 tool_content = msg.get("content", "")
+                 converted_messages.append(
+                     QuraiteToolMessage(
+                         tool_call_id=msg.get("tool_call_id", ""),
+                         content=[
+                             MessageContentText(type="text", text=str(tool_content))
+                         ],
+                     )
+                 )
+
+             else:
+                 # Skip unsupported message types
+                 continue
+
+         logger.info(
+             "Langgraph server message conversion produced %d messages",
+             len(converted_messages),
+         )
+         return converted_messages
+
+     async def ainvoke(
+         self,
+         input: List[AgentMessage],
+         session_id: Annotated[Union[str, None], "Thread ID used by LangGraph API"],
+     ) -> AgentInvocationResponse:
+         agent_messages = []
+         agent_input = self._prepare_input(input)
+         if session_id:
+             config = {"configurable": {"thread_id": session_id}}
+         else:
+             config = {}
+
+         try:
+             logger.info("Langgraph server ainvoke called (session_id=%s)", session_id)
+             async for event in self.remote_graph.astream(agent_input, config=config):
+                 for _, result in event.items():
+                     if result.get("messages"):
+                         logger.debug(
+                             "Langgraph server received %d messages from stream chunk",
+                             len(result.get("messages")),
+                         )
+                         agent_messages += result.get("messages")
+
+         except Exception as e:
+             logger.exception("Error invoking Langgraph remote graph")
+             raise RuntimeError(f"Error invoking LangGraph agent: {e}") from e
+
+         try:
+             # Convert to List[AgentMessage]
+             agent_trajectory = self._convert_langchain_messages_to_quraite_messages(
+                 agent_messages
+             )
+             logger.info(
+                 "Langgraph server ainvoke produced %d trajectory messages",
+                 len(agent_trajectory),
+             )
+
+             return AgentInvocationResponse(
+                 agent_trajectory=agent_trajectory,
+             )
+
+         except ValueError:
+             logger.exception("Langgraph server conversion to AgentMessage failed")
+             return AgentInvocationResponse()
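
A comparable sketch for the server adapter (illustrative only, not part of the packaged file); the URL and graph name are placeholders for a running LangGraph server reachable via langgraph-sdk:

import asyncio

from quraite.adapters.langgraph_server_adapter import LanggraphServerAdapter
from quraite.schema.message import MessageContentText, UserMessage

adapter = LanggraphServerAdapter(
    base_url="http://localhost:2024",  # placeholder: local LangGraph server
    graph_name="agent",                # placeholder: name of the deployed graph
)

async def main():
    response = await adapter.ainvoke(
        input=[UserMessage(content=[MessageContentText(type="text", text="Hello!")])],
        session_id=None,  # or a thread ID to reuse server-side conversation state
    )
    for message in response.agent_trajectory or []:
        print(message.role, message.content)

asyncio.run(main())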