idun-agent-engine 0.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. idun_agent_engine/__init__.py +24 -0
  2. idun_agent_engine/_version.py +3 -0
  3. idun_agent_engine/agent/__init__.py +10 -0
  4. idun_agent_engine/agent/adk/__init__.py +5 -0
  5. idun_agent_engine/agent/adk/adk.py +296 -0
  6. idun_agent_engine/agent/base.py +112 -0
  7. idun_agent_engine/agent/haystack/__init__.py +9 -0
  8. idun_agent_engine/agent/haystack/haystack.py +274 -0
  9. idun_agent_engine/agent/haystack/haystack_model.py +13 -0
  10. idun_agent_engine/agent/haystack/utils.py +13 -0
  11. idun_agent_engine/agent/langgraph/__init__.py +7 -0
  12. idun_agent_engine/agent/langgraph/langgraph.py +553 -0
  13. idun_agent_engine/core/__init__.py +11 -0
  14. idun_agent_engine/core/app_factory.py +73 -0
  15. idun_agent_engine/core/config_builder.py +657 -0
  16. idun_agent_engine/core/engine_config.py +21 -0
  17. idun_agent_engine/core/server_runner.py +145 -0
  18. idun_agent_engine/guardrails/__init__.py +0 -0
  19. idun_agent_engine/guardrails/base.py +24 -0
  20. idun_agent_engine/guardrails/guardrails_hub/guardrails_hub.py +101 -0
  21. idun_agent_engine/guardrails/guardrails_hub/utils.py +1 -0
  22. idun_agent_engine/mcp/__init__.py +5 -0
  23. idun_agent_engine/mcp/helpers.py +97 -0
  24. idun_agent_engine/mcp/registry.py +109 -0
  25. idun_agent_engine/observability/__init__.py +17 -0
  26. idun_agent_engine/observability/base.py +172 -0
  27. idun_agent_engine/observability/gcp_logging/__init__.py +0 -0
  28. idun_agent_engine/observability/gcp_logging/gcp_logging_handler.py +52 -0
  29. idun_agent_engine/observability/gcp_trace/__init__.py +0 -0
  30. idun_agent_engine/observability/gcp_trace/gcp_trace_handler.py +116 -0
  31. idun_agent_engine/observability/langfuse/__init__.py +5 -0
  32. idun_agent_engine/observability/langfuse/langfuse_handler.py +79 -0
  33. idun_agent_engine/observability/phoenix/__init__.py +5 -0
  34. idun_agent_engine/observability/phoenix/phoenix_handler.py +65 -0
  35. idun_agent_engine/observability/phoenix_local/__init__.py +5 -0
  36. idun_agent_engine/observability/phoenix_local/phoenix_local_handler.py +123 -0
  37. idun_agent_engine/py.typed +0 -0
  38. idun_agent_engine/server/__init__.py +5 -0
  39. idun_agent_engine/server/dependencies.py +52 -0
  40. idun_agent_engine/server/lifespan.py +106 -0
  41. idun_agent_engine/server/routers/__init__.py +5 -0
  42. idun_agent_engine/server/routers/agent.py +204 -0
  43. idun_agent_engine/server/routers/agui.py +47 -0
  44. idun_agent_engine/server/routers/base.py +114 -0
  45. idun_agent_engine/server/server_config.py +8 -0
  46. idun_agent_engine/templates/__init__.py +1 -0
  47. idun_agent_engine/templates/correction.py +65 -0
  48. idun_agent_engine/templates/deep_research.py +40 -0
  49. idun_agent_engine/templates/translation.py +70 -0
  50. idun_agent_engine-0.3.4.dist-info/METADATA +335 -0
  51. idun_agent_engine-0.3.4.dist-info/RECORD +60 -0
  52. idun_agent_engine-0.3.4.dist-info/WHEEL +4 -0
  53. idun_agent_engine-0.3.4.dist-info/entry_points.txt +2 -0
  54. idun_platform_cli/__init__.py +0 -0
  55. idun_platform_cli/groups/__init__.py +0 -0
  56. idun_platform_cli/groups/agent/__init__.py +0 -0
  57. idun_platform_cli/groups/agent/main.py +16 -0
  58. idun_platform_cli/groups/agent/package.py +70 -0
  59. idun_platform_cli/groups/agent/serve.py +107 -0
  60. idun_platform_cli/main.py +14 -0
@@ -0,0 +1,553 @@
1
+ """LangGraph agent adapter implementing the BaseAgent protocol."""
2
+
3
+ import importlib.util
4
+ import importlib
5
+ import uuid
6
+ from collections.abc import AsyncGenerator
7
+ from typing import Any
8
+
9
+ import aiosqlite
10
+ from ag_ui.core import events as ag_events
11
+ from ag_ui.core import types as ag_types
12
+ from idun_agent_schema.engine.langgraph import (
13
+ InMemoryCheckpointConfig,
14
+ LangGraphAgentConfig,
15
+ PostgresCheckpointConfig,
16
+ SqliteCheckpointConfig,
17
+ )
18
+ from idun_agent_schema.engine.observability_v2 import ObservabilityConfig
19
+ from langgraph.checkpoint.memory import InMemorySaver
20
+ from langgraph.checkpoint.postgres.aio import AsyncPostgresSaver
21
+ from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver
22
+ from langgraph.graph import StateGraph
23
+ from langgraph.graph.state import CompiledStateGraph
24
+
25
+ from idun_agent_engine import observability
26
+ from idun_agent_engine.agent import base as agent_base
27
+ from copilotkit import LangGraphAGUIAgent
28
+
29
+
30
+ class LanggraphAgent(agent_base.BaseAgent):
31
+ """LangGraph agent adapter implementing the BaseAgent protocol."""
32
+
33
    def __init__(self):
        """Initialize an unconfigured LanggraphAgent with default state."""
        # Stable identity and type label for this adapter instance.
        self._id = str(uuid.uuid4())
        self._agent_type = "LangGraph"
        # Graph I/O schemas, resolved during initialize() when available.
        self._input_schema: Any = None
        self._output_schema: Any = None
        # Compiled LangGraph graph and its CopilotKit AG-UI wrapper.
        self._agent_instance: Any = None
        self._copilotkit_agent_instance: LangGraphAGUIAgent | None = None
        # Persistence: checkpointer/store plus the underlying DB connection
        # (kept so close() can release it).
        self._checkpointer: Any = None
        self._store: Any = None
        self._connection: Any = None
        # Validated configuration; None until initialize() is called.
        self._configuration: LangGraphAgentConfig | None = None
        self._name: str = "Unnamed LangGraph Agent"
        # Diagnostic metadata surfaced through the .infos property.
        self._infos: dict[str, Any] = {
            "status": "Uninitialized",
            "name": self._name,
            "id": self._id,
        }
        # Observability (provider-agnostic)
        self._obs_callbacks: list[Any] | None = None
        self._obs_run_name: str | None = None
54
+
55
+ @property
56
+ def id(self) -> str:
57
+ """Return unique identifier for this agent instance."""
58
+ return self._id
59
+
60
+ @property
61
+ def agent_type(self) -> str:
62
+ """Return agent type label."""
63
+ return self._agent_type
64
+
65
+ @property
66
+ def name(self) -> str:
67
+ """Return configured human-readable agent name."""
68
+ return self._name
69
+
70
+ @property
71
+ def input_schema(self) -> Any:
72
+ """Return input schema provided by underlying graph if available."""
73
+ return self._input_schema
74
+
75
+ @property
76
+ def output_schema(self) -> Any:
77
+ """Return output schema provided by underlying graph if available."""
78
+ return self._output_schema
79
+
80
+ @property
81
+ def agent_instance(self) -> Any:
82
+ """Return compiled graph instance.
83
+
84
+ Raises:
85
+ RuntimeError: If the agent is not yet initialized.
86
+ """
87
+ if self._agent_instance is None:
88
+ raise RuntimeError("Agent not initialized. Call initialize() first.")
89
+ return self._agent_instance
90
+
91
+ @property
92
+ def copilotkit_agent_instance(self) -> LangGraphAGUIAgent:
93
+ """Return the CopilotKit agent instance.
94
+
95
+ Raises:
96
+ RuntimeError: If the CopilotKit agent is not yet initialized.
97
+ """
98
+ if self._copilotkit_agent_instance is None:
99
+ raise RuntimeError(
100
+ "CopilotKit agent not initialized. Call initialize() first."
101
+ )
102
+ return self._copilotkit_agent_instance
103
+
104
+ @property
105
+ def configuration(self) -> LangGraphAgentConfig:
106
+ """Return validated configuration.
107
+
108
+ Raises:
109
+ RuntimeError: If the agent has not been configured yet.
110
+ """
111
+ if not self._configuration:
112
+ raise RuntimeError("Agent not configured. Call initialize() first.")
113
+ return self._configuration
114
+
115
+ @property
116
+ def infos(self) -> dict[str, Any]:
117
+ """Return diagnostic information about the agent instance."""
118
+ self._infos["underlying_agent_type"] = (
119
+ str(type(self._agent_instance)) if self._agent_instance else "N/A"
120
+ )
121
+ return self._infos
122
+
123
+ async def initialize(
124
+ self,
125
+ config: LangGraphAgentConfig,
126
+ observability_config: list[ObservabilityConfig] | None = None,
127
+ ) -> None:
128
+ """Initialize the LangGraph agent asynchronously."""
129
+ self._configuration = LangGraphAgentConfig.model_validate(config)
130
+
131
+ self._name = self._configuration.name or "Unnamed LangGraph Agent"
132
+ self._infos["name"] = self._name
133
+
134
+ await self._setup_persistence()
135
+
136
+ # Observability (provider-agnostic)
137
+ if observability_config:
138
+ handlers, infos = observability.create_observability_handlers(
139
+ observability_config # type: ignore[arg-type]
140
+ )
141
+ self._obs_callbacks = []
142
+ for handler in handlers:
143
+ self._obs_callbacks.extend(handler.get_callbacks())
144
+ # Use the first run name found if not set
145
+ if not self._obs_run_name:
146
+ self._obs_run_name = handler.get_run_name()
147
+
148
+ if infos:
149
+ self._infos["observability"] = infos
150
+
151
+ # Fallback to legacy generic block or langfuse block if no new observability config provided
152
+ elif getattr(self._configuration, "observability", None) or getattr(
153
+ self._configuration, "langfuse", None
154
+ ):
155
+ obs_cfg = None
156
+ try:
157
+ if getattr(self._configuration, "observability", None):
158
+ obs_cfg = self._configuration.observability.resolved() # type: ignore[attr-defined]
159
+ elif getattr(self._configuration, "langfuse", None):
160
+ lf = self._configuration.langfuse.resolved() # type: ignore[attr-defined]
161
+ obs_cfg = type(
162
+ "_Temp",
163
+ (),
164
+ {
165
+ "provider": "langfuse",
166
+ "enabled": lf.enabled,
167
+ "options": {
168
+ "host": lf.host,
169
+ "public_key": lf.public_key,
170
+ "secret_key": lf.secret_key,
171
+ "run_name": lf.run_name,
172
+ },
173
+ },
174
+ )()
175
+ except Exception:
176
+ obs_cfg = None
177
+
178
+ if obs_cfg and getattr(obs_cfg, "enabled", False):
179
+ provider = getattr(obs_cfg, "provider", None)
180
+ options = dict(getattr(obs_cfg, "options", {}) or {})
181
+ # Fallback: if using Langfuse and run_name is not provided, use agent name
182
+ if provider == "langfuse" and not options.get("run_name"):
183
+ options["run_name"] = self._name
184
+
185
+ handler, info = observability.create_observability_handler(
186
+ {
187
+ "provider": provider,
188
+ "enabled": True,
189
+ "options": options,
190
+ }
191
+ )
192
+ if handler:
193
+ self._obs_callbacks = handler.get_callbacks()
194
+ self._obs_run_name = handler.get_run_name()
195
+ if info:
196
+ self._infos["observability"] = dict(info)
197
+
198
+ graph_builder = self._load_graph_builder(self._configuration.graph_definition)
199
+ self._infos["graph_definition"] = self._configuration.graph_definition
200
+
201
+ if isinstance(graph_builder, StateGraph):
202
+ self._agent_instance = graph_builder.compile(
203
+ checkpointer=self._checkpointer, store=self._store
204
+ )
205
+ elif isinstance(graph_builder, CompiledStateGraph):
206
+ self._agent_instance = graph_builder
207
+
208
+ self._copilotkit_agent_instance = LangGraphAGUIAgent(
209
+ name=self._name,
210
+ description="Agent description", # TODO: add agent description
211
+ graph=self._agent_instance,
212
+ config={"callbacks": self._obs_callbacks} if self._obs_callbacks else None,
213
+ )
214
+
215
+ self._copilotkit_agent_instance = LangGraphAGUIAgent(
216
+ name=self._name,
217
+ description="Agent description", # TODO: add agent description
218
+ graph=self._agent_instance,
219
+ )
220
+
221
+ if self._agent_instance:
222
+ try:
223
+ self._input_schema = self._agent_instance.input_schema
224
+ self._output_schema = self._agent_instance.output_schema
225
+ self._infos["input_schema"] = str(self._input_schema)
226
+ self._infos["output_schema"] = str(self._output_schema)
227
+ except Exception:
228
+ print("Could not parse schema")
229
+ self._input_schema = self._configuration.input_schema_definition
230
+ self._output_schema = self._configuration.output_schema_definition
231
+ self._infos["input_schema"] = "Cannot extract schema"
232
+ self._infos["output_schema"] = "Cannot extract schema"
233
+
234
+ else:
235
+ self._input_schema = self._configuration.input_schema_definition
236
+ self._output_schema = self._configuration.output_schema_definition
237
+
238
+ self._infos["status"] = "Initialized"
239
+ self._infos["config_used"] = self._configuration.model_dump()
240
+
241
+ async def close(self):
242
+ """Closes any open resources, like database connections."""
243
+ if self._connection:
244
+ await self._connection.close()
245
+ self._connection = None
246
+ print("Database connection closed.")
247
+
248
    async def _setup_persistence(self) -> None:
        """Configures the agent's persistence (checkpoint and store) asynchronously.

        Selects a checkpointer implementation based on the configured
        checkpoint type (SQLite, in-memory, or Postgres). The SQLite
        connection is kept on ``self._connection`` so close() can release it.

        Raises:
            NotImplementedError: For unsupported checkpointer types, or if a
                store is configured (stores are not yet implemented).
        """
        # No-op until initialize() has stored a validated configuration.
        if not self._configuration:
            return

        if self._configuration.checkpointer:
            if isinstance(self._configuration.checkpointer, SqliteCheckpointConfig):
                # Keep the raw connection so close() can dispose of it later.
                self._connection = await aiosqlite.connect(
                    self._configuration.checkpointer.db_path
                )
                self._checkpointer = AsyncSqliteSaver(conn=self._connection)
                self._infos["checkpointer"] = (
                    self._configuration.checkpointer.model_dump()
                )
            elif isinstance(self._configuration.checkpointer, InMemoryCheckpointConfig):
                self._checkpointer = InMemorySaver()
                self._infos["checkpointer"] = (
                    self._configuration.checkpointer.model_dump()
                )
            elif isinstance(self._configuration.checkpointer, PostgresCheckpointConfig):
                # NOTE(review): in recent langgraph-checkpoint-postgres
                # releases, from_conn_string() returns an async context
                # manager rather than a saver, so calling setup() directly on
                # its return value may fail — confirm against the pinned
                # langgraph version.
                self._checkpointer = AsyncPostgresSaver.from_conn_string(
                    self._configuration.checkpointer.db_url
                )
                await self._checkpointer.setup()
                self._infos["checkpointer"] = (
                    self._configuration.checkpointer.model_dump()
                )
            else:
                raise NotImplementedError(
                    f"Checkpointer type {type(self._configuration.checkpointer)} is not supported."
                )

        if self._configuration.store:
            raise NotImplementedError("Store functionality is not yet implemented.")
282
+
283
+ def _load_graph_builder(self, graph_definition: str) -> StateGraph:
284
+ """Loads a StateGraph instance from a specified path."""
285
+ try:
286
+ module_path, graph_variable_name = graph_definition.rsplit(":", 1)
287
+ if not module_path.endswith(".py"):
288
+ module_path += ".py"
289
+ except ValueError:
290
+ raise ValueError(
291
+ "graph_definition must be in the format 'path/to/file.py:variable_name'"
292
+ ) from None
293
+
294
+ # Try loading as a file path first
295
+ try:
296
+ import os
297
+
298
+ print("Current directory: ", os.getcwd()) # TODO remove
299
+ from pathlib import Path
300
+
301
+ resolved_path = Path(module_path).resolve()
302
+ # If the file doesn't exist, it might be a python module path
303
+ if not resolved_path.exists():
304
+ raise FileNotFoundError
305
+
306
+ spec = importlib.util.spec_from_file_location(
307
+ graph_variable_name, str(resolved_path)
308
+ )
309
+ if spec is None or spec.loader is None:
310
+ raise ImportError(f"Could not load spec for module at {module_path}")
311
+
312
+ module = importlib.util.module_from_spec(spec)
313
+ spec.loader.exec_module(module)
314
+
315
+ graph_builder = getattr(module, graph_variable_name)
316
+ return self._validate_graph_builder(
317
+ graph_builder, module_path, graph_variable_name
318
+ )
319
+
320
+ except (FileNotFoundError, ImportError):
321
+ # Fallback: try loading as a python module
322
+ try:
323
+ module_import_path = (
324
+ module_path[:-3] if module_path.endswith(".py") else module_path
325
+ )
326
+ module = importlib.import_module(module_import_path)
327
+ graph_builder = getattr(module, graph_variable_name)
328
+ return self._validate_graph_builder(
329
+ graph_builder, module_path, graph_variable_name
330
+ )
331
+ except ImportError as e:
332
+ raise ValueError(
333
+ f"Failed to load agent from {graph_definition}. Checked file path and python module: {e}"
334
+ ) from e
335
+ except AttributeError as e:
336
+ raise ValueError(
337
+ f"Variable '{graph_variable_name}' not found in module {module_path}: {e}"
338
+ ) from e
339
+ except Exception as e:
340
+ raise ValueError(
341
+ f"Failed to load agent from {graph_definition}: {e}"
342
+ ) from e
343
+
344
+ def _validate_graph_builder(
345
+ self, graph_builder: Any, module_path: str, graph_variable_name: str
346
+ ) -> StateGraph:
347
+ # TODO to remove, dirty fix for template deepagent langgraph
348
+ if not isinstance(graph_builder, StateGraph) and not isinstance(
349
+ graph_builder, CompiledStateGraph
350
+ ):
351
+ raise TypeError(
352
+ f"The variable '{graph_variable_name}' from {module_path} is not a StateGraph instance."
353
+ )
354
+ return graph_builder # type: ignore[return-value]
355
+
356
+ async def invoke(self, message: Any) -> Any:
357
+ """Process a single input to chat with the agent.
358
+
359
+ The message should be a dictionary containing 'query' and 'session_id'.
360
+ """
361
+ if self._agent_instance is None:
362
+ raise RuntimeError(
363
+ "Agent not initialized. Call initialize() before processing messages."
364
+ )
365
+
366
+ if (
367
+ not isinstance(message, dict)
368
+ or "query" not in message
369
+ or "session_id" not in message
370
+ ):
371
+ raise ValueError(
372
+ "Message must be a dictionary with 'query' and 'session_id' keys."
373
+ )
374
+
375
+ graph_input = {"messages": [("user", message["query"])]}
376
+ config: dict[str, Any] = {"configurable": {"thread_id": message["session_id"]}}
377
+ if self._obs_callbacks:
378
+ config["callbacks"] = self._obs_callbacks
379
+ if self._obs_run_name:
380
+ config["run_name"] = self._obs_run_name
381
+
382
+ output = await self._agent_instance.ainvoke(graph_input, config)
383
+
384
+ if output and "messages" in output and output["messages"]:
385
+ response_message = output["messages"][-1]
386
+ if hasattr(response_message, "content"):
387
+ return response_message.content
388
+ elif isinstance(response_message, dict) and "content" in response_message:
389
+ return response_message["content"]
390
+ elif isinstance(response_message, tuple):
391
+ return response_message[1]
392
+ else:
393
+ # No usable content attribute; fall through to returning raw output
394
+ pass
395
+
396
+ return output
397
+
398
    async def stream(self, message: Any) -> AsyncGenerator[Any, None]:
        """Processes a single input message and returns a stream of ag-ui events.

        Translates LangGraph ``astream_events`` (v2) events into ag-ui
        protocol events: run/step lifecycle, thinking, text-message, and
        tool-call events. Expects ``message`` to be
        ``{'query': str, 'session_id': str}``.

        Raises:
            RuntimeError: If the agent is not initialized.
            ValueError: If the message shape is unsupported.
        """
        if self._agent_instance is None:
            raise RuntimeError(
                "Agent not initialized. Call initialize() before processing messages."
            )

        if isinstance(message, dict) and "query" in message and "session_id" in message:
            run_id = f"run_{uuid.uuid4()}"
            thread_id = message["session_id"]
            user_message = ag_types.UserMessage(
                id=f"msg_{uuid.uuid4()}", role="user", content=message["query"]
            )
            graph_input = {
                "messages": [user_message.model_dump(by_alias=True, exclude_none=True)]
            }
        else:
            raise ValueError(
                "Unsupported message format for process_message_stream. Expects {'query': str, 'session_id': str}"
            )

        # Per-run LangGraph config: thread id for checkpointing plus any
        # observability callbacks/run name captured at initialize() time.
        config: dict[str, Any] = {"configurable": {"thread_id": thread_id}}
        if self._obs_callbacks:
            config["callbacks"] = self._obs_callbacks
        if self._obs_run_name:
            config["run_name"] = self._obs_run_name

        # Cursors for the currently-open message / tool call / step so the
        # matching end events can be emitted later.
        current_message_id: str | None = None
        current_tool_call_id: str | None = None
        tool_call_name: str | None = None
        current_step_name: str | None = None

        async for event in self._agent_instance.astream_events(
            graph_input, config=config, version="v2"
        ):
            kind = event["event"]
            name = event["name"]

            if kind == "on_chain_start":
                current_step_name = name
                # The root chain is named "langgraph": treat it as the run
                # itself rather than an individual step.
                if current_step_name.lower() == "langgraph":
                    yield ag_events.RunStartedEvent(
                        type=ag_events.EventType.RUN_STARTED,
                        run_id=run_id,
                        thread_id=thread_id,
                    )
                else:
                    yield ag_events.StepStartedEvent(
                        type=ag_events.EventType.STEP_STARTED, step_name=name
                    )

            elif kind == "on_chain_end":
                if current_step_name:
                    # NOTE(review): this also emits StepFinished when the
                    # ending chain is the root "langgraph" run, for which no
                    # StepStarted was emitted — confirm intended.
                    yield ag_events.StepFinishedEvent(
                        type=ag_events.EventType.STEP_FINISHED, step_name=name
                    )
                    current_step_name = None

            elif kind == "on_llm_start":
                yield ag_events.ThinkingStartEvent(
                    type=ag_events.EventType.THINKING_START,
                    title=f"Thinking with {name}...",
                )

            elif kind == "on_llm_end":
                yield ag_events.ThinkingEndEvent(type=ag_events.EventType.THINKING_END)

            elif kind == "on_chat_model_stream":
                chunk = event["data"]["chunk"]
                # Open the assistant text message lazily, on the first chunk
                # that actually carries content or tool calls.
                if not current_message_id and (chunk.content or chunk.tool_calls):
                    current_message_id = f"msg_{uuid.uuid4()}"
                    yield ag_events.TextMessageStartEvent(
                        type=ag_events.EventType.TEXT_MESSAGE_START,
                        message_id=current_message_id or "",
                        role="assistant",
                    )

                if chunk.content:
                    yield ag_events.TextMessageContentEvent(
                        type=ag_events.EventType.TEXT_MESSAGE_CONTENT,
                        message_id=current_message_id or "",
                        delta=chunk.content,
                    )

                if chunk.tool_calls:
                    for tc in chunk.tool_calls:
                        # A new tool-call id means the previous call (if any)
                        # has finished and a fresh one begins.
                        if "id" in tc and tc["id"] != current_tool_call_id:
                            if (
                                current_tool_call_id
                            ):  # End previous tool call if a new one starts
                                yield ag_events.ToolCallEndEvent(
                                    type=ag_events.EventType.TOOL_CALL_END,
                                    tool_call_id=current_tool_call_id,
                                )

                            current_tool_call_id = (
                                str(tc["id"]) if tc.get("id") is not None else None
                            )
                            tool_call_name = (
                                str(tc["function"]["name"])
                                if tc.get("function")
                                and tc["function"].get("name") is not None
                                else None
                            )
                            yield ag_events.ToolCallStartEvent(
                                type=ag_events.EventType.TOOL_CALL_START,
                                tool_call_id=current_tool_call_id or "",
                                tool_call_name=tool_call_name or "",
                                parent_message_id=current_message_id or "",
                            )

                        if (
                            "function" in tc
                            and "arguments" in tc["function"]
                            and tc["function"]["arguments"]
                        ):
                            yield ag_events.ToolCallArgsEvent(
                                type=ag_events.EventType.TOOL_CALL_ARGS,
                                tool_call_id=current_tool_call_id or "",
                                delta=tc["function"]["arguments"],
                            )

            elif kind == "on_tool_start":
                yield ag_events.StepStartedEvent(
                    type=ag_events.EventType.STEP_STARTED, step_name=name
                )

            elif kind == "on_tool_end":
                # Tool end event from langgraph has the tool output, but ag-ui model doesn't have a place for it in ToolCallEndEvent
                if current_tool_call_id:
                    yield ag_events.ToolCallEndEvent(
                        type=ag_events.EventType.TOOL_CALL_END,
                        tool_call_id=current_tool_call_id or "",
                    )
                    current_tool_call_id = None

                yield ag_events.StepFinishedEvent(
                    type=ag_events.EventType.STEP_FINISHED, step_name=name
                )
                tool_call_name = None

        # Flush any still-open tool call and text message, then close the run.
        if current_tool_call_id:
            yield ag_events.ToolCallEndEvent(
                type=ag_events.EventType.TOOL_CALL_END,
                tool_call_id=current_tool_call_id or "",
            )

        if current_message_id:
            yield ag_events.TextMessageEndEvent(
                type=ag_events.EventType.TEXT_MESSAGE_END,
                message_id=current_message_id or "",
            )

        yield ag_events.RunFinishedEvent(
            type=ag_events.EventType.RUN_FINISHED, run_id=run_id, thread_id=thread_id
        )
@@ -0,0 +1,11 @@
1
+ """Core module for the Idun Agent Engine.
2
+
3
+ This module contains the user-facing API components that make it easy to:
4
+ - Create FastAPI applications with agent integrations
5
+ - Run servers with proper configuration
6
+ - Build configurations programmatically
7
+ - Handle common deployment scenarios
8
+
9
+ The core module abstracts away the internal complexity while providing
10
+ a clean, intuitive interface for end users.
11
+ """
@@ -0,0 +1,73 @@
1
+ """Application Factory for Idun Agent Engine.
2
+
3
+ This module provides the main entry point for users to create a FastAPI
4
+ application with their agent integrated. It handles all the complexity of
5
+ setting up routes, dependencies, and lifecycle management behind the scenes.
6
+ """
7
+
8
+ from typing import Any
9
+
10
+ from fastapi import FastAPI
11
+ from fastapi.middleware.cors import CORSMiddleware
12
+
13
+ from ..server.lifespan import lifespan
14
+ from ..server.routers.agent import agent_router
15
+ from ..server.routers.base import base_router
16
+ from .config_builder import ConfigBuilder
17
+ from .engine_config import EngineConfig
18
+ from .._version import __version__
19
+
20
+
21
def create_app(
    config_path: str | None = None,
    config_dict: dict[str, Any] | None = None,
    engine_config: EngineConfig | None = None,
) -> FastAPI:
    """Create a FastAPI application with an integrated agent.

    Main user-facing entry point of the Idun Agent Engine: resolves the
    configuration from one of three sources, then builds a FastAPI app with
    lifespan management, CORS, and the agent/base routers wired in.

    Args:
        config_path: Optional path to a YAML configuration file. If not
            provided, looks for 'config.yaml' in the current directory.
        config_dict: Optional dictionary containing configuration; takes
            precedence over config_path.
        engine_config: Pre-validated EngineConfig instance (from
            ConfigBuilder.build()); takes precedence over the other options.

    Returns:
        FastAPI: A configured FastAPI application ready to serve your agent.
    """
    resolved_config = ConfigBuilder.resolve_config(
        config_path=config_path, config_dict=config_dict, engine_config=engine_config
    )

    application = FastAPI(
        lifespan=lifespan,
        title="Idun Agent Engine Server",
        description="A production-ready server for conversational AI agents",
        version=__version__,
        docs_url="/docs",
        redoc_url="/redoc",
    )

    # NOTE(review): wildcard origins combined with allow_credentials=True is
    # rejected by browsers for credentialed requests — confirm intended.
    application.add_middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )

    # The lifespan handler reads this at startup to initialize the agent.
    application.state.engine_config = resolved_config

    application.include_router(agent_router, prefix="/agent", tags=["Agent"])
    application.include_router(base_router, tags=["Base"])

    return application