agent-runtime-core 0.1.0.tar.gz → 0.1.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. agent_runtime_core-0.1.1/.gitignore +149 -0
  2. agent_runtime_core-0.1.1/PKG-INFO +461 -0
  3. agent_runtime_core-0.1.1/README.md +418 -0
  4. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/__init__.py +1 -1
  5. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/pyproject.toml +1 -1
  6. agent_runtime_core-0.1.0/PKG-INFO +0 -75
  7. agent_runtime_core-0.1.0/README.md +0 -32
  8. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/LICENSE +0 -0
  9. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/config.py +0 -0
  10. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/events/__init__.py +0 -0
  11. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/events/base.py +0 -0
  12. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/events/memory.py +0 -0
  13. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/events/redis.py +0 -0
  14. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/events/sqlite.py +0 -0
  15. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/interfaces.py +0 -0
  16. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/llm/__init__.py +0 -0
  17. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/llm/anthropic.py +0 -0
  18. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/llm/litellm_client.py +0 -0
  19. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/llm/openai.py +0 -0
  20. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/queue/__init__.py +0 -0
  21. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/queue/base.py +0 -0
  22. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/queue/memory.py +0 -0
  23. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/queue/redis.py +0 -0
  24. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/queue/sqlite.py +0 -0
  25. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/registry.py +0 -0
  26. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/runner.py +0 -0
  27. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/state/__init__.py +0 -0
  28. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/state/base.py +0 -0
  29. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/state/memory.py +0 -0
  30. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/state/redis.py +0 -0
  31. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/state/sqlite.py +0 -0
  32. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/tracing/__init__.py +0 -0
  33. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/tracing/langfuse.py +0 -0
  34. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/agent_runtime/tracing/noop.py +0 -0
  35. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/tests/__init__.py +0 -0
  36. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/tests/test_events.py +0 -0
  37. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/tests/test_imports.py +0 -0
  38. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/tests/test_queue.py +0 -0
  39. {agent_runtime_core-0.1.0 → agent_runtime_core-0.1.1}/tests/test_state.py +0 -0
@@ -0,0 +1,149 @@
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ *.manifest
31
+ *.spec
32
+
33
+ # Installer logs
34
+ pip-log.txt
35
+ pip-delete-this-directory.txt
36
+
37
+ # Unit test / coverage reports
38
+ htmlcov/
39
+ .tox/
40
+ .nox/
41
+ .coverage
42
+ .coverage.*
43
+ .cache
44
+ nosetests.xml
45
+ coverage.xml
46
+ *.cover
47
+ *.py,cover
48
+ .hypothesis/
49
+ .pytest_cache/
50
+ cover/
51
+
52
+ # Translations
53
+ *.mo
54
+ *.pot
55
+
56
+ # Django stuff:
57
+ *.log
58
+ local_settings.py
59
+ db.sqlite3
60
+ db.sqlite3-journal
61
+
62
+ # Flask stuff:
63
+ instance/
64
+ .webassets-cache
65
+
66
+ # Scrapy stuff:
67
+ .scrapy
68
+
69
+ # Sphinx documentation
70
+ docs/_build/
71
+
72
+ # PyBuilder
73
+ .pybuilder/
74
+ target/
75
+
76
+ # Jupyter Notebook
77
+ .ipynb_checkpoints
78
+
79
+ # IPython
80
+ profile_default/
81
+ ipython_config.py
82
+
83
+ # pyenv
84
+ .python-version
85
+
86
+ # pipenv
87
+ Pipfile.lock
88
+
89
+ # poetry
90
+ poetry.lock
91
+
92
+ # pdm
93
+ .pdm.toml
94
+ .pdm-python
95
+ .pdm-build/
96
+
97
+ # PEP 582
98
+ __pypackages__/
99
+
100
+ # Celery stuff
101
+ celerybeat-schedule
102
+ celerybeat.pid
103
+
104
+ # SageMath parsed files
105
+ *.sage.py
106
+
107
+ # Environments
108
+ .env
109
+ .venv
110
+ env/
111
+ venv/
112
+ ENV/
113
+ env.bak/
114
+ venv.bak/
115
+
116
+ # Spyder project settings
117
+ .spyderproject
118
+ .spyproject
119
+
120
+ # Rope project settings
121
+ .ropeproject
122
+
123
+ # mkdocs documentation
124
+ /site
125
+
126
+ # mypy
127
+ .mypy_cache/
128
+ .dmypy.json
129
+ dmypy.json
130
+
131
+ # Pyre type checker
132
+ .pyre/
133
+
134
+ # pytype static type analyzer
135
+ .pytype/
136
+
137
+ # Cython debug symbols
138
+ cython_debug/
139
+
140
+ # IDE
141
+ .idea/
142
+ .vscode/
143
+ *.swp
144
+ *.swo
145
+ *~
146
+
147
+ # OS
148
+ .DS_Store
149
+ Thumbs.db
@@ -0,0 +1,461 @@
1
+ Metadata-Version: 2.4
2
+ Name: agent-runtime-core
3
+ Version: 0.1.1
4
+ Summary: Framework-agnostic Python library for executing AI agents with consistent patterns
5
+ Project-URL: Homepage, https://github.com/colstrom/agent_runtime
6
+ Project-URL: Repository, https://github.com/colstrom/agent_runtime
7
+ Author: Chris Olstrom
8
+ License-Expression: MIT
9
+ License-File: LICENSE
10
+ Keywords: agents,ai,async,llm,runtime
11
+ Classifier: Development Status :: 3 - Alpha
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: License :: OSI Approved :: MIT License
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3.11
16
+ Classifier: Programming Language :: Python :: 3.12
17
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
18
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
19
+ Requires-Python: >=3.11
20
+ Provides-Extra: all
21
+ Requires-Dist: anthropic>=0.18.0; extra == 'all'
22
+ Requires-Dist: langfuse>=2.0.0; extra == 'all'
23
+ Requires-Dist: litellm>=1.0.0; extra == 'all'
24
+ Requires-Dist: openai>=1.0.0; extra == 'all'
25
+ Requires-Dist: redis>=5.0.0; extra == 'all'
26
+ Provides-Extra: anthropic
27
+ Requires-Dist: anthropic>=0.18.0; extra == 'anthropic'
28
+ Provides-Extra: dev
29
+ Requires-Dist: mypy>=1.0.0; extra == 'dev'
30
+ Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
31
+ Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
32
+ Requires-Dist: pytest>=8.0.0; extra == 'dev'
33
+ Requires-Dist: ruff>=0.1.0; extra == 'dev'
34
+ Provides-Extra: langfuse
35
+ Requires-Dist: langfuse>=2.0.0; extra == 'langfuse'
36
+ Provides-Extra: litellm
37
+ Requires-Dist: litellm>=1.0.0; extra == 'litellm'
38
+ Provides-Extra: openai
39
+ Requires-Dist: openai>=1.0.0; extra == 'openai'
40
+ Provides-Extra: redis
41
+ Requires-Dist: redis>=5.0.0; extra == 'redis'
42
+ Description-Content-Type: text/markdown
43
+
44
+ # agent-runtime-core
45
+
46
+ [![PyPI version](https://badge.fury.io/py/agent-runtime-core.svg)](https://badge.fury.io/py/agent-runtime-core)
47
+ [![Python 3.11+](https://img.shields.io/badge/python-3.11+-blue.svg)](https://www.python.org/downloads/)
48
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
49
+
50
+ A lightweight, framework-agnostic Python library for building AI agent systems. Provides the core abstractions and implementations needed to build production-ready AI agents without tying you to any specific framework.
51
+
52
+ ## Features
53
+
54
+ - 🔌 **Framework Agnostic** - Works with LangGraph, CrewAI, OpenAI Agents, or your own custom loops
55
+ - 🤖 **Model Agnostic** - OpenAI, Anthropic, or any provider via LiteLLM
56
+ - 📦 **Zero Required Dependencies** - Core library has no dependencies; add only what you need
57
+ - 🔄 **Async First** - Built for modern async Python with full sync support
58
+ - 🛠️ **Pluggable Backends** - Memory, Redis, or SQLite for queues, events, and state
59
+ - 📊 **Observable** - Built-in tracing with optional Langfuse integration
60
+ - 🧩 **Composable** - Mix and match components to build your ideal agent system
61
+
62
+ ## Installation
63
+
64
+ ```bash
65
+ # Core library (no dependencies)
66
+ pip install agent-runtime-core
67
+
68
+ # With specific LLM providers
69
+ pip install agent-runtime-core[openai]
70
+ pip install agent-runtime-core[anthropic]
71
+ pip install agent-runtime-core[litellm]
72
+
73
+ # With Redis backend support
74
+ pip install agent-runtime-core[redis]
75
+
76
+ # With observability
77
+ pip install agent-runtime-core[langfuse]
78
+
79
+ # Everything
80
+ pip install agent-runtime-core[all]
81
+ ```
82
+
83
+ ## Quick Start
84
+
85
+ ### Basic Configuration
86
+
87
+ ```python
88
+ from agent_runtime import configure, get_config
89
+
90
+ # Configure the runtime
91
+ configure(
92
+ model_provider="openai",
93
+ openai_api_key="sk-...", # Or use OPENAI_API_KEY env var
94
+ default_model="gpt-4o",
95
+ )
96
+
97
+ # Access configuration anywhere
98
+ config = get_config()
99
+ print(config.model_provider) # "openai"
100
+ ```
101
+
102
+ ### Creating an Agent
103
+
104
+ ```python
105
+ from agent_runtime import (
106
+ AgentRuntime,
107
+ RunContext,
108
+ RunResult,
109
+ EventType,
110
+ register_runtime,
111
+ )
112
+
113
+ class MyAgent(AgentRuntime):
114
+ """A simple conversational agent."""
115
+
116
+ @property
117
+ def key(self) -> str:
118
+ return "my-agent"
119
+
120
+ async def run(self, ctx: RunContext) -> RunResult:
121
+ # Access input messages
122
+ messages = ctx.input_messages
123
+
124
+ # Get an LLM client
125
+ from agent_runtime.llm import get_llm_client
126
+ llm = get_llm_client()
127
+
128
+ # Generate a response
129
+ response = await llm.generate(messages)
130
+
131
+ # Emit events for observability
132
+ await ctx.emit(EventType.ASSISTANT_MESSAGE, {
133
+ "content": response.message["content"],
134
+ })
135
+
136
+ # Return the result
137
+ return RunResult(
138
+ final_output={"response": response.message["content"]},
139
+ final_messages=[response.message],
140
+ )
141
+
142
+ # Register the agent
143
+ register_runtime(MyAgent())
144
+ ```
145
+
146
+ ### Using Tools
147
+
148
+ ```python
149
+ from agent_runtime import Tool, ToolRegistry, RunContext, RunResult
150
+
151
+ # Define tools
152
+ def get_weather(location: str) -> str:
153
+ """Get the current weather for a location."""
154
+ return f"The weather in {location} is sunny, 72°F"
155
+
156
+ def search_web(query: str) -> str:
157
+ """Search the web for information."""
158
+ return f"Search results for: {query}"
159
+
160
+ # Create a tool registry
161
+ tools = ToolRegistry()
162
+ tools.register(Tool.from_function(get_weather))
163
+ tools.register(Tool.from_function(search_web))
164
+
165
+ class ToolAgent(AgentRuntime):
166
+ @property
167
+ def key(self) -> str:
168
+ return "tool-agent"
169
+
170
+ async def run(self, ctx: RunContext) -> RunResult:
171
+ from agent_runtime.llm import get_llm_client
172
+ llm = get_llm_client()
173
+
174
+ messages = list(ctx.input_messages)
175
+
176
+ while True:
177
+ # Generate with tools
178
+ response = await llm.generate(
179
+ messages,
180
+ tools=tools.to_openai_format(),
181
+ )
182
+
183
+ messages.append(response.message)
184
+
185
+ # Check for tool calls
186
+ if not response.tool_calls:
187
+ break
188
+
189
+ # Execute tools
190
+ for tool_call in response.tool_calls:
191
+ result = await tools.execute(
192
+ tool_call["function"]["name"],
193
+ tool_call["function"]["arguments"],
194
+ )
195
+
196
+ await ctx.emit(EventType.TOOL_RESULT, {
197
+ "tool_call_id": tool_call["id"],
198
+ "result": result,
199
+ })
200
+
201
+ messages.append({
202
+ "role": "tool",
203
+ "tool_call_id": tool_call["id"],
204
+ "content": str(result),
205
+ })
206
+
207
+ return RunResult(
208
+ final_output={"response": response.message["content"]},
209
+ final_messages=messages,
210
+ )
211
+ ```
212
+
213
+ ### Running Agents
214
+
215
+ ```python
216
+ from agent_runtime import AgentRunner, RunnerConfig, get_runtime
217
+ import asyncio
218
+
219
+ async def main():
220
+ # Get a registered agent
221
+ agent = get_runtime("my-agent")
222
+
223
+ # Create a runner
224
+ runner = AgentRunner(
225
+ config=RunnerConfig(
226
+ run_timeout_seconds=300,
227
+ max_retries=3,
228
+ )
229
+ )
230
+
231
+ # Execute a run
232
+ result = await runner.execute(
233
+ agent=agent,
234
+ run_id="run-123",
235
+ input_data={
236
+ "messages": [
237
+ {"role": "user", "content": "Hello!"}
238
+ ]
239
+ },
240
+ )
241
+
242
+ print(result.final_output)
243
+
244
+ asyncio.run(main())
245
+ ```
246
+
247
+ ## Core Concepts
248
+
249
+ ### AgentRuntime
250
+
251
+ The base class for all agents. Implement the `run` method to define your agent's behavior:
252
+
253
+ ```python
254
+ class AgentRuntime(ABC):
255
+ @property
256
+ @abstractmethod
257
+ def key(self) -> str:
258
+ """Unique identifier for this agent."""
259
+ pass
260
+
261
+ @abstractmethod
262
+ async def run(self, ctx: RunContext) -> RunResult:
263
+ """Execute the agent logic."""
264
+ pass
265
+ ```
266
+
267
+ ### RunContext
268
+
269
+ Provides access to the current run's state and utilities:
270
+
271
+ ```python
272
+ class RunContext:
273
+ run_id: UUID # Unique run identifier
274
+ input_messages: list # Input messages
275
+ metadata: dict # Run metadata
276
+ tools: ToolRegistry # Available tools
277
+
278
+ async def emit(self, event_type: EventType, payload: dict) -> None:
279
+ """Emit an event."""
280
+
281
+ async def checkpoint(self, state: dict) -> None:
282
+ """Save a checkpoint."""
283
+
284
+ def is_cancelled(self) -> bool:
285
+ """Check if run was cancelled."""
286
+ ```
287
+
288
+ ### RunResult
289
+
290
+ The result of an agent run:
291
+
292
+ ```python
293
+ @dataclass
294
+ class RunResult:
295
+ final_output: dict # Structured output
296
+ final_messages: list = None # Conversation history
297
+ error: ErrorInfo = None # Error details if failed
298
+ ```
299
+
300
+ ### Event Types
301
+
302
+ Built-in event types for observability:
303
+
304
+ - `EventType.RUN_STARTED` - Run execution began
305
+ - `EventType.RUN_SUCCEEDED` - Run completed successfully
306
+ - `EventType.RUN_FAILED` - Run failed with error
307
+ - `EventType.TOOL_CALL` - Tool was invoked
308
+ - `EventType.TOOL_RESULT` - Tool returned result
309
+ - `EventType.ASSISTANT_MESSAGE` - LLM generated message
310
+ - `EventType.CHECKPOINT` - State checkpoint saved
311
+
312
+ ## Backend Options
313
+
314
+ ### Queue Backends
315
+
316
+ ```python
317
+ from agent_runtime.queue import MemoryQueue, RedisQueue
318
+
319
+ # In-memory (for development)
320
+ queue = MemoryQueue()
321
+
322
+ # Redis (for production)
323
+ queue = RedisQueue(redis_url="redis://localhost:6379/0")
324
+ ```
325
+
326
+ ### Event Bus Backends
327
+
328
+ ```python
329
+ from agent_runtime.events import MemoryEventBus, RedisEventBus
330
+
331
+ # In-memory
332
+ event_bus = MemoryEventBus()
333
+
334
+ # Redis Pub/Sub
335
+ event_bus = RedisEventBus(redis_url="redis://localhost:6379/0")
336
+ ```
337
+
338
+ ### State Store Backends
339
+
340
+ ```python
341
+ from agent_runtime.state import MemoryStateStore, RedisStateStore, SQLiteStateStore
342
+
343
+ # In-memory
344
+ state = MemoryStateStore()
345
+
346
+ # Redis
347
+ state = RedisStateStore(redis_url="redis://localhost:6379/0")
348
+
349
+ # SQLite (persistent, single-node)
350
+ state = SQLiteStateStore(db_path="./agent_state.db")
351
+ ```
352
+
353
+ ## LLM Clients
354
+
355
+ ### OpenAI
356
+
357
+ ```python
358
+ from agent_runtime.llm import OpenAIClient
359
+
360
+ client = OpenAIClient(
361
+ api_key="sk-...", # Or use OPENAI_API_KEY env var
362
+ default_model="gpt-4o",
363
+ )
364
+
365
+ response = await client.generate([
366
+ {"role": "user", "content": "Hello!"}
367
+ ])
368
+ ```
369
+
370
+ ### Anthropic
371
+
372
+ ```python
373
+ from agent_runtime.llm import AnthropicClient
374
+
375
+ client = AnthropicClient(
376
+ api_key="sk-ant-...", # Or use ANTHROPIC_API_KEY env var
377
+ default_model="claude-3-5-sonnet-20241022",
378
+ )
379
+ ```
380
+
381
+ ### LiteLLM (Any Provider)
382
+
383
+ ```python
384
+ from agent_runtime.llm import LiteLLMClient
385
+
386
+ # Use any LiteLLM-supported model
387
+ client = LiteLLMClient(default_model="gpt-4o")
388
+ client = LiteLLMClient(default_model="claude-3-5-sonnet-20241022")
389
+ client = LiteLLMClient(default_model="ollama/llama2")
390
+ ```
391
+
392
+ ## Tracing & Observability
393
+
394
+ ### Langfuse Integration
395
+
396
+ ```python
397
+ from agent_runtime import configure
398
+
399
+ configure(
400
+ langfuse_enabled=True,
401
+ langfuse_public_key="pk-...",
402
+ langfuse_secret_key="sk-...",
403
+ )
404
+ ```
405
+
406
+ ### Custom Trace Sink
407
+
408
+ ```python
409
+ from agent_runtime import TraceSink
410
+
411
+ class MyTraceSink(TraceSink):
412
+ async def trace(self, event: dict) -> None:
413
+ # Send to your observability platform
414
+ print(f"Trace: {event}")
415
+ ```
416
+
417
+ ## Integration with Django
418
+
419
+ For Django applications, use [django-agent-runtime](https://pypi.org/project/django-agent-runtime/) which provides:
420
+
421
+ - Django models for conversations, runs, and events
422
+ - REST API endpoints
423
+ - Server-Sent Events (SSE) for real-time streaming
424
+ - Management commands for running workers
425
+ - PostgreSQL-backed queue and event bus
426
+
427
+ ```bash
428
+ pip install django-agent-runtime
429
+ ```
430
+
431
+ ## API Reference
432
+
433
+ ### Configuration
434
+
435
+ | Setting | Type | Default | Description |
436
+ |---------|------|---------|-------------|
437
+ | `model_provider` | str | `"openai"` | LLM provider: openai, anthropic, litellm |
438
+ | `default_model` | str | `"gpt-4o"` | Default model to use |
439
+ | `queue_backend` | str | `"memory"` | Queue backend: memory, redis |
440
+ | `event_bus_backend` | str | `"memory"` | Event bus: memory, redis |
441
+ | `state_store_backend` | str | `"memory"` | State store: memory, redis, sqlite |
442
+ | `redis_url` | str | `None` | Redis connection URL |
443
+ | `langfuse_enabled` | bool | `False` | Enable Langfuse tracing |
444
+
445
+ ### Registry Functions
446
+
447
+ ```python
448
+ register_runtime(runtime: AgentRuntime) -> None
449
+ get_runtime(key: str) -> AgentRuntime
450
+ list_runtimes() -> list[str]
451
+ unregister_runtime(key: str) -> None
452
+ clear_registry() -> None
453
+ ```
454
+
455
+ ## Contributing
456
+
457
+ Contributions are welcome! Please feel free to submit a Pull Request.
458
+
459
+ ## License
460
+
461
+ MIT License - see [LICENSE](LICENSE) for details.
@@ -0,0 +1,418 @@
1
+ # agent-runtime-core
2
+
3
+ [![PyPI version](https://badge.fury.io/py/agent-runtime-core.svg)](https://badge.fury.io/py/agent-runtime-core)
4
+ [![Python 3.11+](https://img.shields.io/badge/python-3.11+-blue.svg)](https://www.python.org/downloads/)
5
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
6
+
7
+ A lightweight, framework-agnostic Python library for building AI agent systems. Provides the core abstractions and implementations needed to build production-ready AI agents without tying you to any specific framework.
8
+
9
+ ## Features
10
+
11
+ - 🔌 **Framework Agnostic** - Works with LangGraph, CrewAI, OpenAI Agents, or your own custom loops
12
+ - 🤖 **Model Agnostic** - OpenAI, Anthropic, or any provider via LiteLLM
13
+ - 📦 **Zero Required Dependencies** - Core library has no dependencies; add only what you need
14
+ - 🔄 **Async First** - Built for modern async Python with full sync support
15
+ - 🛠️ **Pluggable Backends** - Memory, Redis, or SQLite for queues, events, and state
16
+ - 📊 **Observable** - Built-in tracing with optional Langfuse integration
17
+ - 🧩 **Composable** - Mix and match components to build your ideal agent system
18
+
19
+ ## Installation
20
+
21
+ ```bash
22
+ # Core library (no dependencies)
23
+ pip install agent-runtime-core
24
+
25
+ # With specific LLM providers
26
+ pip install agent-runtime-core[openai]
27
+ pip install agent-runtime-core[anthropic]
28
+ pip install agent-runtime-core[litellm]
29
+
30
+ # With Redis backend support
31
+ pip install agent-runtime-core[redis]
32
+
33
+ # With observability
34
+ pip install agent-runtime-core[langfuse]
35
+
36
+ # Everything
37
+ pip install agent-runtime-core[all]
38
+ ```
39
+
40
+ ## Quick Start
41
+
42
+ ### Basic Configuration
43
+
44
+ ```python
45
+ from agent_runtime import configure, get_config
46
+
47
+ # Configure the runtime
48
+ configure(
49
+ model_provider="openai",
50
+ openai_api_key="sk-...", # Or use OPENAI_API_KEY env var
51
+ default_model="gpt-4o",
52
+ )
53
+
54
+ # Access configuration anywhere
55
+ config = get_config()
56
+ print(config.model_provider) # "openai"
57
+ ```
58
+
59
+ ### Creating an Agent
60
+
61
+ ```python
62
+ from agent_runtime import (
63
+ AgentRuntime,
64
+ RunContext,
65
+ RunResult,
66
+ EventType,
67
+ register_runtime,
68
+ )
69
+
70
+ class MyAgent(AgentRuntime):
71
+ """A simple conversational agent."""
72
+
73
+ @property
74
+ def key(self) -> str:
75
+ return "my-agent"
76
+
77
+ async def run(self, ctx: RunContext) -> RunResult:
78
+ # Access input messages
79
+ messages = ctx.input_messages
80
+
81
+ # Get an LLM client
82
+ from agent_runtime.llm import get_llm_client
83
+ llm = get_llm_client()
84
+
85
+ # Generate a response
86
+ response = await llm.generate(messages)
87
+
88
+ # Emit events for observability
89
+ await ctx.emit(EventType.ASSISTANT_MESSAGE, {
90
+ "content": response.message["content"],
91
+ })
92
+
93
+ # Return the result
94
+ return RunResult(
95
+ final_output={"response": response.message["content"]},
96
+ final_messages=[response.message],
97
+ )
98
+
99
+ # Register the agent
100
+ register_runtime(MyAgent())
101
+ ```
102
+
103
+ ### Using Tools
104
+
105
+ ```python
106
+ from agent_runtime import Tool, ToolRegistry, RunContext, RunResult
107
+
108
+ # Define tools
109
+ def get_weather(location: str) -> str:
110
+ """Get the current weather for a location."""
111
+ return f"The weather in {location} is sunny, 72°F"
112
+
113
+ def search_web(query: str) -> str:
114
+ """Search the web for information."""
115
+ return f"Search results for: {query}"
116
+
117
+ # Create a tool registry
118
+ tools = ToolRegistry()
119
+ tools.register(Tool.from_function(get_weather))
120
+ tools.register(Tool.from_function(search_web))
121
+
122
+ class ToolAgent(AgentRuntime):
123
+ @property
124
+ def key(self) -> str:
125
+ return "tool-agent"
126
+
127
+ async def run(self, ctx: RunContext) -> RunResult:
128
+ from agent_runtime.llm import get_llm_client
129
+ llm = get_llm_client()
130
+
131
+ messages = list(ctx.input_messages)
132
+
133
+ while True:
134
+ # Generate with tools
135
+ response = await llm.generate(
136
+ messages,
137
+ tools=tools.to_openai_format(),
138
+ )
139
+
140
+ messages.append(response.message)
141
+
142
+ # Check for tool calls
143
+ if not response.tool_calls:
144
+ break
145
+
146
+ # Execute tools
147
+ for tool_call in response.tool_calls:
148
+ result = await tools.execute(
149
+ tool_call["function"]["name"],
150
+ tool_call["function"]["arguments"],
151
+ )
152
+
153
+ await ctx.emit(EventType.TOOL_RESULT, {
154
+ "tool_call_id": tool_call["id"],
155
+ "result": result,
156
+ })
157
+
158
+ messages.append({
159
+ "role": "tool",
160
+ "tool_call_id": tool_call["id"],
161
+ "content": str(result),
162
+ })
163
+
164
+ return RunResult(
165
+ final_output={"response": response.message["content"]},
166
+ final_messages=messages,
167
+ )
168
+ ```
169
+
170
+ ### Running Agents
171
+
172
+ ```python
173
+ from agent_runtime import AgentRunner, RunnerConfig, get_runtime
174
+ import asyncio
175
+
176
+ async def main():
177
+ # Get a registered agent
178
+ agent = get_runtime("my-agent")
179
+
180
+ # Create a runner
181
+ runner = AgentRunner(
182
+ config=RunnerConfig(
183
+ run_timeout_seconds=300,
184
+ max_retries=3,
185
+ )
186
+ )
187
+
188
+ # Execute a run
189
+ result = await runner.execute(
190
+ agent=agent,
191
+ run_id="run-123",
192
+ input_data={
193
+ "messages": [
194
+ {"role": "user", "content": "Hello!"}
195
+ ]
196
+ },
197
+ )
198
+
199
+ print(result.final_output)
200
+
201
+ asyncio.run(main())
202
+ ```
203
+
204
+ ## Core Concepts
205
+
206
+ ### AgentRuntime
207
+
208
+ The base class for all agents. Implement the `run` method to define your agent's behavior:
209
+
210
+ ```python
211
+ class AgentRuntime(ABC):
212
+ @property
213
+ @abstractmethod
214
+ def key(self) -> str:
215
+ """Unique identifier for this agent."""
216
+ pass
217
+
218
+ @abstractmethod
219
+ async def run(self, ctx: RunContext) -> RunResult:
220
+ """Execute the agent logic."""
221
+ pass
222
+ ```
223
+
224
+ ### RunContext
225
+
226
+ Provides access to the current run's state and utilities:
227
+
228
+ ```python
229
+ class RunContext:
230
+ run_id: UUID # Unique run identifier
231
+ input_messages: list # Input messages
232
+ metadata: dict # Run metadata
233
+ tools: ToolRegistry # Available tools
234
+
235
+ async def emit(self, event_type: EventType, payload: dict) -> None:
236
+ """Emit an event."""
237
+
238
+ async def checkpoint(self, state: dict) -> None:
239
+ """Save a checkpoint."""
240
+
241
+ def is_cancelled(self) -> bool:
242
+ """Check if run was cancelled."""
243
+ ```
244
+
245
+ ### RunResult
246
+
247
+ The result of an agent run:
248
+
249
+ ```python
250
+ @dataclass
251
+ class RunResult:
252
+ final_output: dict # Structured output
253
+ final_messages: list = None # Conversation history
254
+ error: ErrorInfo = None # Error details if failed
255
+ ```
256
+
257
+ ### Event Types
258
+
259
+ Built-in event types for observability:
260
+
261
+ - `EventType.RUN_STARTED` - Run execution began
262
+ - `EventType.RUN_SUCCEEDED` - Run completed successfully
263
+ - `EventType.RUN_FAILED` - Run failed with error
264
+ - `EventType.TOOL_CALL` - Tool was invoked
265
+ - `EventType.TOOL_RESULT` - Tool returned result
266
+ - `EventType.ASSISTANT_MESSAGE` - LLM generated message
267
+ - `EventType.CHECKPOINT` - State checkpoint saved
268
+
269
+ ## Backend Options
270
+
271
+ ### Queue Backends
272
+
273
+ ```python
274
+ from agent_runtime.queue import MemoryQueue, RedisQueue
275
+
276
+ # In-memory (for development)
277
+ queue = MemoryQueue()
278
+
279
+ # Redis (for production)
280
+ queue = RedisQueue(redis_url="redis://localhost:6379/0")
281
+ ```
282
+
283
+ ### Event Bus Backends
284
+
285
+ ```python
286
+ from agent_runtime.events import MemoryEventBus, RedisEventBus
287
+
288
+ # In-memory
289
+ event_bus = MemoryEventBus()
290
+
291
+ # Redis Pub/Sub
292
+ event_bus = RedisEventBus(redis_url="redis://localhost:6379/0")
293
+ ```
294
+
295
+ ### State Store Backends
296
+
297
+ ```python
298
+ from agent_runtime.state import MemoryStateStore, RedisStateStore, SQLiteStateStore
299
+
300
+ # In-memory
301
+ state = MemoryStateStore()
302
+
303
+ # Redis
304
+ state = RedisStateStore(redis_url="redis://localhost:6379/0")
305
+
306
+ # SQLite (persistent, single-node)
307
+ state = SQLiteStateStore(db_path="./agent_state.db")
308
+ ```
309
+
310
+ ## LLM Clients
311
+
312
+ ### OpenAI
313
+
314
+ ```python
315
+ from agent_runtime.llm import OpenAIClient
316
+
317
+ client = OpenAIClient(
318
+ api_key="sk-...", # Or use OPENAI_API_KEY env var
319
+ default_model="gpt-4o",
320
+ )
321
+
322
+ response = await client.generate([
323
+ {"role": "user", "content": "Hello!"}
324
+ ])
325
+ ```
326
+
327
+ ### Anthropic
328
+
329
+ ```python
330
+ from agent_runtime.llm import AnthropicClient
331
+
332
+ client = AnthropicClient(
333
+ api_key="sk-ant-...", # Or use ANTHROPIC_API_KEY env var
334
+ default_model="claude-3-5-sonnet-20241022",
335
+ )
336
+ ```
337
+
338
+ ### LiteLLM (Any Provider)
339
+
340
+ ```python
341
+ from agent_runtime.llm import LiteLLMClient
342
+
343
+ # Use any LiteLLM-supported model
344
+ client = LiteLLMClient(default_model="gpt-4o")
345
+ client = LiteLLMClient(default_model="claude-3-5-sonnet-20241022")
346
+ client = LiteLLMClient(default_model="ollama/llama2")
347
+ ```
348
+
349
+ ## Tracing & Observability
350
+
351
+ ### Langfuse Integration
352
+
353
+ ```python
354
+ from agent_runtime import configure
355
+
356
+ configure(
357
+ langfuse_enabled=True,
358
+ langfuse_public_key="pk-...",
359
+ langfuse_secret_key="sk-...",
360
+ )
361
+ ```
362
+
363
+ ### Custom Trace Sink
364
+
365
+ ```python
366
+ from agent_runtime import TraceSink
367
+
368
+ class MyTraceSink(TraceSink):
369
+ async def trace(self, event: dict) -> None:
370
+ # Send to your observability platform
371
+ print(f"Trace: {event}")
372
+ ```
373
+
374
+ ## Integration with Django
375
+
376
+ For Django applications, use [django-agent-runtime](https://pypi.org/project/django-agent-runtime/) which provides:
377
+
378
+ - Django models for conversations, runs, and events
379
+ - REST API endpoints
380
+ - Server-Sent Events (SSE) for real-time streaming
381
+ - Management commands for running workers
382
+ - PostgreSQL-backed queue and event bus
383
+
384
+ ```bash
385
+ pip install django-agent-runtime
386
+ ```
387
+
388
+ ## API Reference
389
+
390
+ ### Configuration
391
+
392
+ | Setting | Type | Default | Description |
393
+ |---------|------|---------|-------------|
394
+ | `model_provider` | str | `"openai"` | LLM provider: openai, anthropic, litellm |
395
+ | `default_model` | str | `"gpt-4o"` | Default model to use |
396
+ | `queue_backend` | str | `"memory"` | Queue backend: memory, redis |
397
+ | `event_bus_backend` | str | `"memory"` | Event bus: memory, redis |
398
+ | `state_store_backend` | str | `"memory"` | State store: memory, redis, sqlite |
399
+ | `redis_url` | str | `None` | Redis connection URL |
400
+ | `langfuse_enabled` | bool | `False` | Enable Langfuse tracing |
401
+
402
+ ### Registry Functions
403
+
404
+ ```python
405
+ register_runtime(runtime: AgentRuntime) -> None
406
+ get_runtime(key: str) -> AgentRuntime
407
+ list_runtimes() -> list[str]
408
+ unregister_runtime(key: str) -> None
409
+ clear_registry() -> None
410
+ ```
411
+
412
+ ## Contributing
413
+
414
+ Contributions are welcome! Please feel free to submit a Pull Request.
415
+
416
+ ## License
417
+
418
+ MIT License - see [LICENSE](LICENSE) for details.
@@ -34,7 +34,7 @@ Example usage:
34
34
  return RunResult(final_output={"message": "Hello!"})
35
35
  """
36
36
 
37
- __version__ = "0.1.0"
37
+ __version__ = "0.1.1"
38
38
 
39
39
  # Core interfaces
40
40
  from agent_runtime.interfaces import (
@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
4
4
 
5
5
  [project]
6
6
  name = "agent-runtime-core"
7
- version = "0.1.0"
7
+ version = "0.1.1"
8
8
  description = "Framework-agnostic Python library for executing AI agents with consistent patterns"
9
9
  readme = "README.md"
10
10
  license = "MIT"
@@ -1,75 +0,0 @@
1
- Metadata-Version: 2.4
2
- Name: agent-runtime-core
3
- Version: 0.1.0
4
- Summary: Framework-agnostic Python library for executing AI agents with consistent patterns
5
- Project-URL: Homepage, https://github.com/colstrom/agent_runtime
6
- Project-URL: Repository, https://github.com/colstrom/agent_runtime
7
- Author: Chris Olstrom
8
- License-Expression: MIT
9
- License-File: LICENSE
10
- Keywords: agents,ai,async,llm,runtime
11
- Classifier: Development Status :: 3 - Alpha
12
- Classifier: Intended Audience :: Developers
13
- Classifier: License :: OSI Approved :: MIT License
14
- Classifier: Programming Language :: Python :: 3
15
- Classifier: Programming Language :: Python :: 3.11
16
- Classifier: Programming Language :: Python :: 3.12
17
- Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
18
- Classifier: Topic :: Software Development :: Libraries :: Python Modules
19
- Requires-Python: >=3.11
20
- Provides-Extra: all
21
- Requires-Dist: anthropic>=0.18.0; extra == 'all'
22
- Requires-Dist: langfuse>=2.0.0; extra == 'all'
23
- Requires-Dist: litellm>=1.0.0; extra == 'all'
24
- Requires-Dist: openai>=1.0.0; extra == 'all'
25
- Requires-Dist: redis>=5.0.0; extra == 'all'
26
- Provides-Extra: anthropic
27
- Requires-Dist: anthropic>=0.18.0; extra == 'anthropic'
28
- Provides-Extra: dev
29
- Requires-Dist: mypy>=1.0.0; extra == 'dev'
30
- Requires-Dist: pytest-asyncio>=0.23.0; extra == 'dev'
31
- Requires-Dist: pytest-cov>=4.0.0; extra == 'dev'
32
- Requires-Dist: pytest>=8.0.0; extra == 'dev'
33
- Requires-Dist: ruff>=0.1.0; extra == 'dev'
34
- Provides-Extra: langfuse
35
- Requires-Dist: langfuse>=2.0.0; extra == 'langfuse'
36
- Provides-Extra: litellm
37
- Requires-Dist: litellm>=1.0.0; extra == 'litellm'
38
- Provides-Extra: openai
39
- Requires-Dist: openai>=1.0.0; extra == 'openai'
40
- Provides-Extra: redis
41
- Requires-Dist: redis>=5.0.0; extra == 'redis'
42
- Description-Content-Type: text/markdown
43
-
44
- # agent_runtime
45
-
46
- Framework-agnostic agent runtime library for Python.
47
-
48
- ## Installation
49
-
50
- ```bash
51
- pip install agent_runtime
52
- ```
53
-
54
- ## Quick Start
55
-
56
- ```python
57
- from agent_runtime import configure, get_llm_client
58
-
59
- # Configure
60
- configure(
61
- model_provider="openai",
62
- openai_api_key="sk-..."
63
- )
64
-
65
- # Get LLM client
66
- llm = get_llm_client()
67
- ```
68
-
69
- ## Features
70
-
71
- - Pluggable backends (memory, redis, sqlite)
72
- - LLM client abstractions (OpenAI, Anthropic, LiteLLM)
73
- - Event streaming
74
- - State management
75
- - Queue-based execution
@@ -1,32 +0,0 @@
1
- # agent_runtime
2
-
3
- Framework-agnostic agent runtime library for Python.
4
-
5
- ## Installation
6
-
7
- ```bash
8
- pip install agent_runtime
9
- ```
10
-
11
- ## Quick Start
12
-
13
- ```python
14
- from agent_runtime import configure, get_llm_client
15
-
16
- # Configure
17
- configure(
18
- model_provider="openai",
19
- openai_api_key="sk-..."
20
- )
21
-
22
- # Get LLM client
23
- llm = get_llm_client()
24
- ```
25
-
26
- ## Features
27
-
28
- - Pluggable backends (memory, redis, sqlite)
29
- - LLM client abstractions (OpenAI, Anthropic, LiteLLM)
30
- - Event streaming
31
- - State management
32
- - Queue-based execution