idun-agent-engine 0.1.0__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. idun_agent_engine/__init__.py +2 -25
  2. idun_agent_engine/_version.py +1 -1
  3. idun_agent_engine/agent/__init__.py +10 -0
  4. idun_agent_engine/agent/base.py +97 -0
  5. idun_agent_engine/agent/haystack/__init__.py +9 -0
  6. idun_agent_engine/agent/haystack/haystack.py +261 -0
  7. idun_agent_engine/agent/haystack/haystack_model.py +13 -0
  8. idun_agent_engine/agent/haystack/utils.py +13 -0
  9. idun_agent_engine/agent/langgraph/__init__.py +7 -0
  10. idun_agent_engine/agent/langgraph/langgraph.py +429 -0
  11. idun_agent_engine/cli/__init__.py +16 -0
  12. idun_agent_engine/core/__init__.py +11 -0
  13. idun_agent_engine/core/app_factory.py +63 -0
  14. idun_agent_engine/core/config_builder.py +456 -0
  15. idun_agent_engine/core/engine_config.py +22 -0
  16. idun_agent_engine/core/server_runner.py +146 -0
  17. idun_agent_engine/observability/__init__.py +13 -0
  18. idun_agent_engine/observability/base.py +111 -0
  19. idun_agent_engine/observability/langfuse/__init__.py +5 -0
  20. idun_agent_engine/observability/langfuse/langfuse_handler.py +72 -0
  21. idun_agent_engine/observability/phoenix/__init__.py +5 -0
  22. idun_agent_engine/observability/phoenix/phoenix_handler.py +65 -0
  23. idun_agent_engine/observability/phoenix_local/__init__.py +5 -0
  24. idun_agent_engine/observability/phoenix_local/phoenix_local_handler.py +123 -0
  25. idun_agent_engine/py.typed +0 -1
  26. idun_agent_engine/server/__init__.py +5 -0
  27. idun_agent_engine/server/dependencies.py +23 -0
  28. idun_agent_engine/server/lifespan.py +42 -0
  29. idun_agent_engine/server/routers/__init__.py +5 -0
  30. idun_agent_engine/server/routers/agent.py +68 -0
  31. idun_agent_engine/server/routers/base.py +60 -0
  32. idun_agent_engine/server/server_config.py +8 -0
  33. idun_agent_engine-0.2.1.dist-info/METADATA +278 -0
  34. idun_agent_engine-0.2.1.dist-info/RECORD +35 -0
  35. {idun_agent_engine-0.1.0.dist-info → idun_agent_engine-0.2.1.dist-info}/WHEEL +1 -1
  36. idun_agent_engine-0.1.0.dist-info/METADATA +0 -317
  37. idun_agent_engine-0.1.0.dist-info/RECORD +0 -6
@@ -0,0 +1,429 @@
1
+ """LangGraph agent adapter implementing the BaseAgent protocol."""
2
+
3
+ import importlib.util
4
+ import uuid
5
+ from collections.abc import AsyncGenerator
6
+ from typing import Any
7
+
8
+ import aiosqlite
9
+ from ag_ui.core import events as ag_events
10
+ from ag_ui.core import types as ag_types
11
+ from idun_agent_schema.engine.langgraph import (
12
+ LangGraphAgentConfig,
13
+ SqliteCheckpointConfig,
14
+ )
15
+ from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver
16
+ from langgraph.graph import StateGraph
17
+
18
+ from idun_agent_engine import observability
19
+ from idun_agent_engine.agent import base as agent_base
20
+
21
+
22
+ class LanggraphAgent(agent_base.BaseAgent):
23
+ """LangGraph agent adapter implementing the BaseAgent protocol."""
24
+
25
+ def __init__(self):
26
+ """Initialize an unconfigured LanggraphAgent with default state."""
27
+ self._id = str(uuid.uuid4())
28
+ self._agent_type = "LangGraph"
29
+ self._input_schema: Any = None
30
+ self._output_schema: Any = None
31
+ self._agent_instance: Any = None
32
+ self._checkpointer: Any = None
33
+ self._store: Any = None
34
+ self._connection: Any = None
35
+ self._configuration: LangGraphAgentConfig | None = None
36
+ self._name: str = "Unnamed LangGraph Agent"
37
+ self._infos: dict[str, Any] = {
38
+ "status": "Uninitialized",
39
+ "name": self._name,
40
+ "id": self._id,
41
+ }
42
+ # Observability (provider-agnostic)
43
+ self._obs_callbacks: list[Any] | None = None
44
+ self._obs_run_name: str | None = None
45
+
46
+ @property
47
+ def id(self) -> str:
48
+ """Return unique identifier for this agent instance."""
49
+ return self._id
50
+
51
+ @property
52
+ def agent_type(self) -> str:
53
+ """Return agent type label."""
54
+ return self._agent_type
55
+
56
+ @property
57
+ def name(self) -> str:
58
+ """Return configured human-readable agent name."""
59
+ return self._name
60
+
61
+ @property
62
+ def input_schema(self) -> Any:
63
+ """Return input schema provided by underlying graph if available."""
64
+ return self._input_schema
65
+
66
+ @property
67
+ def output_schema(self) -> Any:
68
+ """Return output schema provided by underlying graph if available."""
69
+ return self._output_schema
70
+
71
+ @property
72
+ def agent_instance(self) -> Any:
73
+ """Return compiled graph instance.
74
+
75
+ Raises:
76
+ RuntimeError: If the agent is not yet initialized.
77
+ """
78
+ if self._agent_instance is None:
79
+ raise RuntimeError("Agent not initialized. Call initialize() first.")
80
+ return self._agent_instance
81
+
82
+ @property
83
+ def configuration(self) -> LangGraphAgentConfig:
84
+ """Return validated configuration.
85
+
86
+ Raises:
87
+ RuntimeError: If the agent has not been configured yet.
88
+ """
89
+ if not self._configuration:
90
+ raise RuntimeError("Agent not configured. Call initialize() first.")
91
+ return self._configuration
92
+
93
+ @property
94
+ def infos(self) -> dict[str, Any]:
95
+ """Return diagnostic information about the agent instance."""
96
+ self._infos["underlying_agent_type"] = (
97
+ str(type(self._agent_instance)) if self._agent_instance else "N/A"
98
+ )
99
+ return self._infos
100
+
101
    async def initialize(self, config: LangGraphAgentConfig) -> None:
        """Initialize the LangGraph agent asynchronously.

        Validates the configuration, sets up persistence, wires optional
        observability callbacks, loads and compiles the graph definition,
        and records diagnostic info in ``self._infos``.

        Args:
            config: Raw or already-validated LangGraph agent configuration;
                re-validated here via ``model_validate``.
        """
        self._configuration = LangGraphAgentConfig.model_validate(config)

        self._name = self._configuration.name or "Unnamed LangGraph Agent"
        self._infos["name"] = self._name

        # Must run before graph compilation: it creates the checkpointer
        # passed to graph_builder.compile() below.
        await self._setup_persistence()

        # Observability (provider-agnostic). Prefer generic block; fallback to legacy langfuse block.
        obs_cfg = None
        try:
            if getattr(self._configuration, "observability", None):
                obs_cfg = self._configuration.observability.resolved()  # type: ignore[attr-defined]
            elif getattr(self._configuration, "langfuse", None):
                lf = self._configuration.langfuse.resolved()  # type: ignore[attr-defined]
                # Adapter shim: wrap the legacy langfuse config in an ad-hoc
                # object with the same attributes as the generic observability
                # config (provider / enabled / options).
                obs_cfg = type(
                    "_Temp",
                    (),
                    {
                        "provider": "langfuse",
                        "enabled": lf.enabled,
                        "options": {
                            "host": lf.host,
                            "public_key": lf.public_key,
                            "secret_key": lf.secret_key,
                            "run_name": lf.run_name,
                        },
                    },
                )()
        except Exception:
            # Observability is best-effort: any failure resolving the config
            # disables it rather than aborting agent initialization.
            # NOTE(review): this also hides genuine config errors — consider
            # logging the exception.
            obs_cfg = None

        if obs_cfg and getattr(obs_cfg, "enabled", False):
            provider = getattr(obs_cfg, "provider", None)
            options = dict(getattr(obs_cfg, "options", {}) or {})
            # Fallback: if using Langfuse and run_name is not provided, use agent name
            if provider == "langfuse" and not options.get("run_name"):
                options["run_name"] = self._name

            handler, info = observability.create_observability_handler(
                {
                    "provider": provider,
                    "enabled": True,
                    "options": options,
                }
            )
            if handler:
                self._obs_callbacks = handler.get_callbacks()
                self._obs_run_name = handler.get_run_name()
            if info:
                self._infos["observability"] = dict(info)

        # Load the StateGraph builder from "path/to/file.py:variable" and
        # compile it with the persistence backends configured above.
        graph_builder = self._load_graph_builder(self._configuration.graph_definition)
        self._infos["graph_definition"] = self._configuration.graph_definition

        self._agent_instance = graph_builder.compile(
            checkpointer=self._checkpointer, store=self._store
        )

        if self._agent_instance:
            # Prefer the schemas reported by the compiled graph itself.
            self._input_schema = self._agent_instance.input_schema
            self._output_schema = self._agent_instance.output_schema
            self._infos["input_schema"] = str(self._input_schema)
            self._infos["output_schema"] = str(self._output_schema)
        else:
            # Fall back to schemas declared in the configuration.
            self._input_schema = self._configuration.input_schema_definition
            self._output_schema = self._configuration.output_schema_definition

        self._infos["status"] = "Initialized"
        self._infos["config_used"] = self._configuration.model_dump()
172
+
173
+ async def close(self):
174
+ """Closes any open resources, like database connections."""
175
+ if self._connection:
176
+ await self._connection.close()
177
+ self._connection = None
178
+ print("Database connection closed.")
179
+
180
+ async def _setup_persistence(self) -> None:
181
+ """Configures the agent's persistence (checkpoint and store) asynchronously."""
182
+ if not self._configuration:
183
+ return
184
+
185
+ if self._configuration.checkpointer:
186
+ if isinstance(self._configuration.checkpointer, SqliteCheckpointConfig):
187
+ self._connection = await aiosqlite.connect(
188
+ self._configuration.checkpointer.db_path
189
+ )
190
+ self._checkpointer = AsyncSqliteSaver(conn=self._connection)
191
+ self._infos["checkpointer"] = (
192
+ self._configuration.checkpointer.model_dump()
193
+ )
194
+ else:
195
+ raise NotImplementedError("Only SQLite checkpointer is supported.")
196
+
197
+ if self._configuration.store:
198
+ raise NotImplementedError("Store functionality is not yet implemented.")
199
+
200
+ def _load_graph_builder(self, graph_definition: str) -> StateGraph:
201
+ """Loads a StateGraph instance from a specified path."""
202
+ try:
203
+ module_path, graph_variable_name = graph_definition.rsplit(":", 1)
204
+ except ValueError:
205
+ raise ValueError(
206
+ "graph_definition must be in the format 'path/to/file.py:variable_name'"
207
+ ) from None
208
+
209
+ try:
210
+ spec = importlib.util.spec_from_file_location(
211
+ graph_variable_name, module_path
212
+ )
213
+ if spec is None or spec.loader is None:
214
+ raise ImportError(f"Could not load spec for module at {module_path}")
215
+
216
+ module = importlib.util.module_from_spec(spec)
217
+ spec.loader.exec_module(module)
218
+
219
+ graph_builder = getattr(module, graph_variable_name)
220
+ except (FileNotFoundError, ImportError, AttributeError) as e:
221
+ raise ValueError(
222
+ f"Failed to load agent from {graph_definition}: {e}"
223
+ ) from e
224
+
225
+ if not isinstance(graph_builder, StateGraph):
226
+ raise TypeError(
227
+ f"The variable '{graph_variable_name}' from {module_path} is not a StateGraph instance."
228
+ )
229
+
230
+ return graph_builder
231
+
232
+ async def invoke(self, message: Any) -> Any:
233
+ """Process a single input to chat with the agent.
234
+
235
+ The message should be a dictionary containing 'query' and 'session_id'.
236
+ """
237
+ if self._agent_instance is None:
238
+ raise RuntimeError(
239
+ "Agent not initialized. Call initialize() before processing messages."
240
+ )
241
+
242
+ if (
243
+ not isinstance(message, dict)
244
+ or "query" not in message
245
+ or "session_id" not in message
246
+ ):
247
+ raise ValueError(
248
+ "Message must be a dictionary with 'query' and 'session_id' keys."
249
+ )
250
+
251
+ graph_input = {"messages": [("user", message["query"])]}
252
+ config: dict[str, Any] = {"configurable": {"thread_id": message["session_id"]}}
253
+ if self._obs_callbacks:
254
+ config["callbacks"] = self._obs_callbacks
255
+ if self._obs_run_name:
256
+ config["run_name"] = self._obs_run_name
257
+
258
+ output = await self._agent_instance.ainvoke(graph_input, config)
259
+
260
+ if output and "messages" in output and output["messages"]:
261
+ response_message = output["messages"][-1]
262
+ if hasattr(response_message, "content"):
263
+ return response_message.content
264
+ elif isinstance(response_message, dict) and "content" in response_message:
265
+ return response_message["content"]
266
+ elif isinstance(response_message, tuple):
267
+ return response_message[1]
268
+ else:
269
+ # No usable content attribute; fall through to returning raw output
270
+ pass
271
+
272
+ return output
273
+
274
    async def stream(self, message: Any) -> AsyncGenerator[Any]:
        """Process one input message and yield a stream of ag-ui events.

        Translates LangGraph ``astream_events`` (version "v2") events into
        ag-ui protocol events: run/step lifecycle, thinking markers,
        assistant text deltas, and tool call start/args/end.

        Args:
            message: A dict with 'query' and 'session_id' keys.

        Yields:
            ag_events event objects, starting with RunStartedEvent and
            ending with RunFinishedEvent.

        Raises:
            RuntimeError: If the agent is not initialized.
            ValueError: If the message shape is unsupported.
        """
        if self._agent_instance is None:
            raise RuntimeError(
                "Agent not initialized. Call initialize() before processing messages."
            )

        if isinstance(message, dict) and "query" in message and "session_id" in message:
            run_id = f"run_{uuid.uuid4()}"
            thread_id = message["session_id"]
            user_message = ag_types.UserMessage(
                id=f"msg_{uuid.uuid4()}", role="user", content=message["query"]
            )
            graph_input = {
                "messages": [user_message.model_dump(by_alias=True, exclude_none=True)]
            }
        else:
            raise ValueError(
                "Unsupported message format for process_message_stream. Expects {'query': str, 'session_id': str}"
            )

        # thread_id scopes checkpointed conversation state; observability
        # callbacks are attached only when configured in initialize().
        config: dict[str, Any] = {"configurable": {"thread_id": thread_id}}
        if self._obs_callbacks:
            config["callbacks"] = self._obs_callbacks
        if self._obs_run_name:
            config["run_name"] = self._obs_run_name

        # Streaming state: ids of the in-flight assistant message, tool call,
        # and the chain step currently open.
        current_message_id: str | None = None
        current_tool_call_id: str | None = None
        tool_call_name: str | None = None
        current_step_name = None

        async for event in self._agent_instance.astream_events(
            graph_input, config=config, version="v2"
        ):
            kind = event["event"]
            name = event["name"]

            if kind == "on_chain_start":
                current_step_name = name
                # A chain named "langgraph" (case-insensitive) is treated as
                # the root run; any other chain is reported as a step.
                if current_step_name.lower() == "langgraph":
                    yield ag_events.RunStartedEvent(
                        type=ag_events.EventType.RUN_STARTED,
                        run_id=run_id,
                        thread_id=thread_id,
                    )
                else:
                    yield ag_events.StepStartedEvent(
                        type=ag_events.EventType.STEP_STARTED, step_name=name
                    )

            elif kind == "on_chain_end":
                if current_step_name:
                    yield ag_events.StepFinishedEvent(
                        type=ag_events.EventType.STEP_FINISHED, step_name=name
                    )
                    current_step_name = None

            elif kind == "on_llm_start":
                yield ag_events.ThinkingStartEvent(
                    type=ag_events.EventType.THINKING_START,
                    title=f"Thinking with {name}...",
                )

            elif kind == "on_llm_end":
                yield ag_events.ThinkingEndEvent(type=ag_events.EventType.THINKING_END)

            elif kind == "on_chat_model_stream":
                chunk = event["data"]["chunk"]
                # Open the assistant message lazily, on the first chunk that
                # carries content or tool calls.
                if not current_message_id and (chunk.content or chunk.tool_calls):
                    current_message_id = f"msg_{uuid.uuid4()}"
                    yield ag_events.TextMessageStartEvent(
                        type=ag_events.EventType.TEXT_MESSAGE_START,
                        message_id=current_message_id or "",
                        role="assistant",
                    )

                if chunk.content:
                    yield ag_events.TextMessageContentEvent(
                        type=ag_events.EventType.TEXT_MESSAGE_CONTENT,
                        message_id=current_message_id or "",
                        delta=chunk.content,
                    )

                if chunk.tool_calls:
                    for tc in chunk.tool_calls:
                        # A new tool call id means a new tool call starts.
                        if "id" in tc and tc["id"] != current_tool_call_id:
                            if (
                                current_tool_call_id
                            ):  # End previous tool call if a new one starts
                                yield ag_events.ToolCallEndEvent(
                                    type=ag_events.EventType.TOOL_CALL_END,
                                    tool_call_id=current_tool_call_id,
                                )

                            current_tool_call_id = (
                                str(tc["id"]) if tc.get("id") is not None else None
                            )
                            tool_call_name = (
                                str(tc["function"]["name"])
                                if tc.get("function")
                                and tc["function"].get("name") is not None
                                else None
                            )
                            yield ag_events.ToolCallStartEvent(
                                type=ag_events.EventType.TOOL_CALL_START,
                                tool_call_id=current_tool_call_id or "",
                                tool_call_name=tool_call_name or "",
                                parent_message_id=current_message_id or "",
                            )

                        # Stream incremental tool-call argument text.
                        if (
                            "function" in tc
                            and "arguments" in tc["function"]
                            and tc["function"]["arguments"]
                        ):
                            yield ag_events.ToolCallArgsEvent(
                                type=ag_events.EventType.TOOL_CALL_ARGS,
                                tool_call_id=current_tool_call_id or "",
                                delta=tc["function"]["arguments"],
                            )

            elif kind == "on_tool_start":
                yield ag_events.StepStartedEvent(
                    type=ag_events.EventType.STEP_STARTED, step_name=name
                )

            elif kind == "on_tool_end":
                # Tool end event from langgraph has the tool output, but ag-ui model doesn't have a place for it in ToolCallEndEvent
                if current_tool_call_id:
                    yield ag_events.ToolCallEndEvent(
                        type=ag_events.EventType.TOOL_CALL_END,
                        tool_call_id=current_tool_call_id or "",
                    )
                    current_tool_call_id = None

                yield ag_events.StepFinishedEvent(
                    type=ag_events.EventType.STEP_FINISHED, step_name=name
                )
                tool_call_name = None

        # Close any tool call or message still open when the event stream ends.
        if current_tool_call_id:
            yield ag_events.ToolCallEndEvent(
                type=ag_events.EventType.TOOL_CALL_END,
                tool_call_id=current_tool_call_id or "",
            )

        if current_message_id:
            yield ag_events.TextMessageEndEvent(
                type=ag_events.EventType.TEXT_MESSAGE_END,
                message_id=current_message_id or "",
            )

        yield ag_events.RunFinishedEvent(
            type=ag_events.EventType.RUN_FINISHED, run_id=run_id, thread_id=thread_id
        )
@@ -0,0 +1,16 @@
1
+ """Command Line Interface for Idun Agent Engine.
2
+
3
+ This module will provide CLI tools for:
4
+ - Generating boilerplate projects
5
+ - Running agents from the command line
6
+ - Validating configurations
7
+ - Deploying agents to various platforms
8
+
9
+ Future commands will include:
10
+ - `idun init` - Create a new agent project
11
+ - `idun run` - Run an agent from config
12
+ - `idun validate` - Validate configuration files
13
+ - `idun deploy` - Deploy to cloud platforms
14
+ """
15
+
16
+ # Future CLI entry points will be defined here
@@ -0,0 +1,11 @@
1
+ """Core module for the Idun Agent Engine.
2
+
3
+ This module contains the user-facing API components that make it easy to:
4
+ - Create FastAPI applications with agent integrations
5
+ - Run servers with proper configuration
6
+ - Build configurations programmatically
7
+ - Handle common deployment scenarios
8
+
9
+ The core module abstracts away the internal complexity while providing
10
+ a clean, intuitive interface for end users.
11
+ """
@@ -0,0 +1,63 @@
1
+ """Application Factory for Idun Agent Engine.
2
+
3
+ This module provides the main entry point for users to create a FastAPI
4
+ application with their agent integrated. It handles all the complexity of
5
+ setting up routes, dependencies, and lifecycle management behind the scenes.
6
+ """
7
+
8
+ from typing import Any
9
+
10
+ from fastapi import FastAPI
11
+
12
+ from ..server.lifespan import lifespan
13
+ from ..server.routers.agent import agent_router
14
+ from ..server.routers.base import base_router
15
+ from .config_builder import ConfigBuilder
16
+ from .engine_config import EngineConfig
17
+
18
+
19
+ def create_app(
20
+ config_path: str | None = None,
21
+ config_dict: dict[str, Any] | None = None,
22
+ engine_config: EngineConfig | None = None,
23
+ ) -> FastAPI:
24
+ """Create a FastAPI application with an integrated agent.
25
+
26
+ This is the main entry point for users of the Idun Agent Engine. It creates a
27
+ fully configured FastAPI application that serves your agent with proper
28
+ lifecycle management, routing, and error handling.
29
+
30
+ Args:
31
+ config_path: Optional path to a YAML configuration file. If not provided,
32
+ looks for 'config.yaml' in the current directory.
33
+ config_dict: Optional dictionary containing configuration. If provided,
34
+ takes precedence over config_path. Useful for programmatic configuration.
35
+ engine_config: Pre-validated EngineConfig instance (from ConfigBuilder.build()).
36
+ Takes precedence over other options.
37
+
38
+ Returns:
39
+ FastAPI: A configured FastAPI application ready to serve your agent.
40
+ """
41
+ # Resolve configuration from various sources using ConfigBuilder's umbrella function
42
+ validated_config = ConfigBuilder.resolve_config(
43
+ config_path=config_path, config_dict=config_dict, engine_config=engine_config
44
+ )
45
+
46
+ # Create the FastAPI application
47
+ app = FastAPI(
48
+ lifespan=lifespan,
49
+ title="Idun Agent Engine Server",
50
+ description="A production-ready server for conversational AI agents",
51
+ version="0.1.0",
52
+ docs_url="/docs",
53
+ redoc_url="/redoc",
54
+ )
55
+
56
+ # Store configuration in app state for lifespan to use
57
+ app.state.engine_config = validated_config
58
+
59
+ # Include the routers
60
+ app.include_router(agent_router, prefix="/agent", tags=["Agent"])
61
+ app.include_router(base_router, tags=["Base"])
62
+
63
+ return app