agent-runtime-core 0.2.1-py3-none-any.whl → 0.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. {agent_runtime → agent_runtime_core}/__init__.py +8 -8
  2. {agent_runtime → agent_runtime_core}/config.py +1 -1
  3. {agent_runtime → agent_runtime_core}/events/__init__.py +5 -5
  4. {agent_runtime → agent_runtime_core}/events/memory.py +1 -1
  5. {agent_runtime → agent_runtime_core}/events/redis.py +1 -1
  6. {agent_runtime → agent_runtime_core}/events/sqlite.py +1 -1
  7. {agent_runtime → agent_runtime_core}/llm/__init__.py +6 -6
  8. {agent_runtime → agent_runtime_core}/llm/anthropic.py +4 -4
  9. {agent_runtime → agent_runtime_core}/llm/litellm_client.py +2 -2
  10. {agent_runtime → agent_runtime_core}/llm/openai.py +4 -4
  11. {agent_runtime → agent_runtime_core}/persistence/__init__.py +48 -12
  12. agent_runtime_core/persistence/base.py +737 -0
  13. {agent_runtime → agent_runtime_core}/persistence/file.py +1 -1
  14. {agent_runtime → agent_runtime_core}/persistence/manager.py +122 -14
  15. {agent_runtime → agent_runtime_core}/queue/__init__.py +5 -5
  16. {agent_runtime → agent_runtime_core}/queue/memory.py +1 -1
  17. {agent_runtime → agent_runtime_core}/queue/redis.py +1 -1
  18. {agent_runtime → agent_runtime_core}/queue/sqlite.py +1 -1
  19. {agent_runtime → agent_runtime_core}/registry.py +1 -1
  20. {agent_runtime → agent_runtime_core}/runner.py +6 -6
  21. {agent_runtime → agent_runtime_core}/state/__init__.py +5 -5
  22. {agent_runtime → agent_runtime_core}/state/memory.py +1 -1
  23. {agent_runtime → agent_runtime_core}/state/redis.py +1 -1
  24. {agent_runtime → agent_runtime_core}/state/sqlite.py +1 -1
  25. {agent_runtime → agent_runtime_core}/testing.py +1 -1
  26. {agent_runtime → agent_runtime_core}/tracing/__init__.py +4 -4
  27. {agent_runtime → agent_runtime_core}/tracing/langfuse.py +1 -1
  28. {agent_runtime → agent_runtime_core}/tracing/noop.py +1 -1
  29. {agent_runtime_core-0.2.1.dist-info → agent_runtime_core-0.4.0.dist-info}/METADATA +352 -42
  30. agent_runtime_core-0.4.0.dist-info/RECORD +36 -0
  31. agent_runtime/persistence/base.py +0 -332
  32. agent_runtime_core-0.2.1.dist-info/RECORD +0 -36
  33. {agent_runtime → agent_runtime_core}/events/base.py +0 -0
  34. {agent_runtime → agent_runtime_core}/interfaces.py +0 -0
  35. {agent_runtime → agent_runtime_core}/queue/base.py +0 -0
  36. {agent_runtime → agent_runtime_core}/state/base.py +0 -0
  37. {agent_runtime_core-0.2.1.dist-info → agent_runtime_core-0.4.0.dist-info}/WHEEL +0 -0
  38. {agent_runtime_core-0.2.1.dist-info → agent_runtime_core-0.4.0.dist-info}/licenses/LICENSE +0 -0
@@ -9,7 +9,7 @@ This package provides:
  - A runner for executing agent runs
 
  Example usage:
- from agent_runtime import (
+ from agent_runtime_core import (
  AgentRuntime,
  RunContext,
  RunResult,
@@ -34,10 +34,10 @@ Example usage:
  return RunResult(final_output={"message": "Hello!"})
  """
 
- __version__ = "0.2.1"
+ __version__ = "0.3.0"
 
  # Core interfaces
- from agent_runtime.interfaces import (
+ from agent_runtime_core.interfaces import (
  AgentRuntime,
  EventType,
  ErrorInfo,
@@ -54,14 +54,14 @@ from agent_runtime.interfaces import (
  )
 
  # Configuration
- from agent_runtime.config import (
+ from agent_runtime_core.config import (
  RuntimeConfig,
  configure,
  get_config,
  )
 
  # Registry
- from agent_runtime.registry import (
+ from agent_runtime_core.registry import (
  register_runtime,
  get_runtime,
  list_runtimes,
@@ -70,7 +70,7 @@ from agent_runtime.registry import (
  )
 
  # Runner
- from agent_runtime.runner import (
+ from agent_runtime_core.runner import (
  AgentRunner,
  RunnerConfig,
  RunContextImpl,
@@ -78,7 +78,7 @@ from agent_runtime.runner import (
 
 
  # Testing utilities
- from agent_runtime.testing import (
+ from agent_runtime_core.testing import (
  MockRunContext,
  MockLLMClient,
  MockLLMResponse,
@@ -88,7 +88,7 @@ from agent_runtime.testing import (
  )
 
  # Persistence (memory, conversations, tasks, preferences)
- from agent_runtime.persistence import (
+ from agent_runtime_core.persistence import (
  # Abstract interfaces
  MemoryStore,
  ConversationStore,
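The hunks above show the substance of the release: the top-level package was renamed from agent_runtime to agent_runtime_core, and every internal import follows suit. For downstream code the change is the same mechanical rename; a minimal sketch (only names exported in the __init__.py hunks above are used):

    # Before (agent-runtime-core 0.2.1):
    # from agent_runtime import AgentRuntime, RunContext, RunResult, configure

    # After (0.4.0) - only the top-level package name changes:
    from agent_runtime_core import AgentRuntime, RunContext, RunResult, configure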
@@ -84,7 +84,7 @@ def configure(**kwargs) -> RuntimeConfig:
  The configured RuntimeConfig instance
 
  Example:
- from agent_runtime import configure
+ from agent_runtime_core import configure
 
  configure(
  model_provider="openai",
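The config.py hunk only touches the docstring, but it points at the runtime-wide entry point. A minimal sketch of the same call under the new package name (the key value is a placeholder; the keyword names are the ones visible in this diff's docstrings and error messages):

    from agent_runtime_core import configure

    # Placeholder key; model_provider and openai_api_key appear verbatim
    # in the configure() and get_llm_client() docstrings in this diff.
    configure(
        model_provider="openai",
        openai_api_key="sk-...",
    )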
@@ -9,8 +9,8 @@ Provides:
  - SQLiteEventBus: For persistent local storage
  """
 
- from agent_runtime.events.base import EventBus, Event
- from agent_runtime.events.memory import InMemoryEventBus
+ from agent_runtime_core.events.base import EventBus, Event
+ from agent_runtime_core.events.memory import InMemoryEventBus
 
  __all__ = [
  "EventBus",
@@ -31,7 +31,7 @@ def get_event_bus(backend: str = None, **kwargs) -> EventBus:
  Returns:
  EventBus instance
  """
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
 
  config = get_config()
  backend = backend or config.event_bus_backend
@@ -40,14 +40,14 @@ def get_event_bus(backend: str = None, **kwargs) -> EventBus:
  return InMemoryEventBus()
 
  elif backend == "redis":
- from agent_runtime.events.redis import RedisEventBus
+ from agent_runtime_core.events.redis import RedisEventBus
  url = kwargs.get("url") or config.redis_url
  if not url:
  raise ValueError("redis_url is required for redis event bus backend")
  return RedisEventBus(url=url, **kwargs)
 
  elif backend == "sqlite":
- from agent_runtime.events.sqlite import SQLiteEventBus
+ from agent_runtime_core.events.sqlite import SQLiteEventBus
  path = kwargs.get("path") or config.sqlite_path or "agent_runtime.db"
  return SQLiteEventBus(path=path)
 
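As the get_event_bus() hunks show, the backend falls back to config.event_bus_backend and each non-memory backend is imported lazily from the renamed package. A minimal sketch of calling the factory directly (the URL and path values are placeholders; the keyword names match the kwargs the hunk reads):

    from agent_runtime_core.events import get_event_bus

    # Default: resolves the backend from RuntimeConfig (in-memory unless configured).
    bus = get_event_bus()

    # Redis-backed bus; "url" is checked before config.redis_url.
    redis_bus = get_event_bus("redis", url="redis://localhost:6379/0")

    # SQLite-backed bus; "path" falls back to config.sqlite_path or "agent_runtime.db".
    sqlite_bus = get_event_bus("sqlite", path="agent_runtime.db")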
@@ -13,7 +13,7 @@ from datetime import datetime, timezone
  from typing import AsyncIterator, Optional
  from uuid import UUID
 
- from agent_runtime.events.base import EventBus, Event
+ from agent_runtime_core.events.base import EventBus, Event
 
 
  class InMemoryEventBus(EventBus):
@@ -12,7 +12,7 @@ import json
  from typing import AsyncIterator, Optional
  from uuid import UUID
 
- from agent_runtime.events.base import EventBus, Event
+ from agent_runtime_core.events.base import EventBus, Event
 
 
  class RedisEventBus(EventBus):
@@ -15,7 +15,7 @@ from datetime import datetime, timezone
  from typing import AsyncIterator, Optional
  from uuid import UUID
 
- from agent_runtime.events.base import EventBus, Event
+ from agent_runtime_core.events.base import EventBus, Event
 
 
  class SQLiteEventBus(EventBus):
@@ -8,7 +8,7 @@ Provides:
  - LiteLLMClient: LiteLLM adapter (optional)
  """
 
- from agent_runtime.interfaces import LLMClient, LLMResponse, LLMStreamChunk
+ from agent_runtime_core.interfaces import LLMClient, LLMResponse, LLMStreamChunk
 
  __all__ = [
  "LLMClient",
@@ -48,7 +48,7 @@ def get_llm_client(provider: str = None, **kwargs) -> LLMClient:
 
  Example:
  # Using config (recommended)
- from agent_runtime.config import configure
+ from agent_runtime_core.config import configure
  configure(model_provider="openai", openai_api_key="sk-...")
  llm = get_llm_client()
 
@@ -58,21 +58,21 @@ def get_llm_client(provider: str = None, **kwargs) -> LLMClient:
  # Or with a different provider
  llm = get_llm_client(provider='anthropic', api_key='sk-ant-...')
  """
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
 
  config = get_config()
  provider = provider or config.model_provider
 
  if provider == "openai":
- from agent_runtime.llm.openai import OpenAIClient
+ from agent_runtime_core.llm.openai import OpenAIClient
  return OpenAIClient(**kwargs)
 
  elif provider == "anthropic":
- from agent_runtime.llm.anthropic import AnthropicClient
+ from agent_runtime_core.llm.anthropic import AnthropicClient
  return AnthropicClient(**kwargs)
 
  elif provider == "litellm":
- from agent_runtime.llm.litellm_client import LiteLLMClient
+ from agent_runtime_core.llm.litellm_client import LiteLLMClient
  return LiteLLMClient(**kwargs)
 
  else:
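The LLM factory follows the same pattern as the event bus: get_llm_client() falls back to config.model_provider and lazily imports the matching adapter. A minimal sketch based on the docstring examples in this hunk, assuming the relevant extra (e.g. agent-runtime-core[openai]) is installed and with placeholder keys:

    from agent_runtime_core import configure
    from agent_runtime_core.llm import get_llm_client

    # Using config (the route the docstring recommends).
    configure(model_provider="openai", openai_api_key="sk-...")
    llm = get_llm_client()

    # Or overriding the provider per call, as in the docstring example.
    anthropic_llm = get_llm_client(provider="anthropic", api_key="sk-ant-...")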
@@ -5,7 +5,7 @@ Anthropic API client implementation.
  import os
  from typing import AsyncIterator, Optional
 
- from agent_runtime.interfaces import (
+ from agent_runtime_core.interfaces import (
  LLMClient,
  LLMResponse,
  LLMStreamChunk,
@@ -43,7 +43,7 @@ class AnthropicClient(LLMClient):
  "Install it with: pip install agent-runtime-core[anthropic]"
  )
 
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
  config = get_config()
 
  self.default_model = default_model or config.default_model or "claude-sonnet-4-20250514"
@@ -56,7 +56,7 @@ class AnthropicClient(LLMClient):
  "Anthropic API key is not configured.\n\n"
  "Configure it using one of these methods:\n"
  " 1. Use configure():\n"
- " from agent_runtime.config import configure\n"
+ " from agent_runtime_core.config import configure\n"
  " configure(anthropic_api_key='sk-ant-...')\n\n"
  " 2. Set the ANTHROPIC_API_KEY environment variable:\n"
  " export ANTHROPIC_API_KEY='sk-ant-...'\n\n"
@@ -81,7 +81,7 @@ class AnthropicClient(LLMClient):
  if explicit_key:
  return explicit_key
 
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
  config = get_config()
  settings_key = config.get_anthropic_api_key()
  if settings_key:
@@ -7,7 +7,7 @@ This is an OPTIONAL adapter - the core runtime doesn't depend on it.
 
  from typing import AsyncIterator, Optional
 
- from agent_runtime.interfaces import (
+ from agent_runtime_core.interfaces import (
  LLMClient,
  LLMResponse,
  LLMStreamChunk,
@@ -41,7 +41,7 @@ class LiteLLMClient(LLMClient):
  "Install it with: pip install agent-runtime-core[litellm]"
  )
 
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
  config = get_config()
 
  self.default_model = default_model or config.default_model or "gpt-4o"
@@ -5,7 +5,7 @@ OpenAI API client implementation.
  import os
  from typing import AsyncIterator, Optional
 
- from agent_runtime.interfaces import (
+ from agent_runtime_core.interfaces import (
  LLMClient,
  LLMResponse,
  LLMStreamChunk,
@@ -45,7 +45,7 @@ class OpenAIClient(LLMClient):
  "Install it with: pip install agent-runtime-core[openai]"
  )
 
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
  config = get_config()
 
  self.default_model = default_model or config.default_model
@@ -66,7 +66,7 @@ class OpenAIClient(LLMClient):
  "OpenAI API key is not configured.\n\n"
  "Configure it using one of these methods:\n"
  " 1. Use configure():\n"
- " from agent_runtime.config import configure\n"
+ " from agent_runtime_core.config import configure\n"
  " configure(openai_api_key='sk-...')\n\n"
  " 2. Set the OPENAI_API_KEY environment variable:\n"
  " export OPENAI_API_KEY='sk-...'\n\n"
@@ -87,7 +87,7 @@ class OpenAIClient(LLMClient):
  if explicit_key:
  return explicit_key
 
- from agent_runtime.config import get_config
+ from agent_runtime_core.config import get_config
  config = get_config()
  settings_key = config.get_openai_api_key()
  if settings_key:
@@ -6,9 +6,11 @@ This module provides pluggable storage backends for:
  - Conversation history (full conversation state including tool calls)
  - Task state (task lists and progress)
  - Preferences (user and agent configuration)
+ - Knowledge base (facts, summaries, embeddings) - optional
+ - Audit/history (logs, errors, metrics) - optional
 
  Example usage:
- from agent_runtime.persistence import (
+ from agent_runtime_core.persistence import (
  MemoryStore,
  ConversationStore,
  FileMemoryStore,
@@ -16,43 +18,61 @@ Example usage:
16
18
  PersistenceManager,
17
19
  Scope,
18
20
  )
19
-
21
+
20
22
  # Use the high-level manager
21
23
  manager = PersistenceManager()
22
-
24
+
23
25
  # Store global memory
24
26
  await manager.memory.set("user_name", "Alice", scope=Scope.GLOBAL)
25
-
27
+
26
28
  # Store project-specific memory
27
29
  await manager.memory.set("project_type", "python", scope=Scope.PROJECT)
28
-
30
+
29
31
  # Save a conversation
30
32
  await manager.conversations.save(conversation)
31
33
  """
32
34
 
33
- from agent_runtime.persistence.base import (
35
+ from agent_runtime_core.persistence.base import (
36
+ # Core stores
34
37
  MemoryStore,
35
38
  ConversationStore,
36
39
  TaskStore,
37
40
  PreferencesStore,
41
+ # Optional stores
42
+ KnowledgeStore,
43
+ AuditStore,
44
+ # Enums
38
45
  Scope,
46
+ TaskState,
47
+ FactType,
48
+ AuditEventType,
49
+ ErrorSeverity,
50
+ # Conversation models
39
51
  Conversation,
40
52
  ConversationMessage,
41
53
  ToolCall,
42
54
  ToolResult,
55
+ # Task models
43
56
  TaskList,
44
57
  Task,
45
- TaskState,
58
+ # Knowledge models
59
+ Fact,
60
+ Summary,
61
+ Embedding,
62
+ # Audit models
63
+ AuditEntry,
64
+ ErrorRecord,
65
+ PerformanceMetric,
46
66
  )
47
67
 
48
- from agent_runtime.persistence.file import (
68
+ from agent_runtime_core.persistence.file import (
49
69
  FileMemoryStore,
50
70
  FileConversationStore,
51
71
  FileTaskStore,
52
72
  FilePreferencesStore,
53
73
  )
54
74
 
55
- from agent_runtime.persistence.manager import (
75
+ from agent_runtime_core.persistence.manager import (
56
76
  PersistenceManager,
57
77
  PersistenceConfig,
58
78
  get_persistence_manager,
@@ -60,20 +80,36 @@ from agent_runtime.persistence.manager import (
  )
 
  __all__ = [
- # Abstract interfaces
+ # Abstract interfaces - core
  "MemoryStore",
  "ConversationStore",
  "TaskStore",
  "PreferencesStore",
+ # Abstract interfaces - optional
+ "KnowledgeStore",
+ "AuditStore",
+ # Enums
  "Scope",
- # Data classes
+ "TaskState",
+ "FactType",
+ "AuditEventType",
+ "ErrorSeverity",
+ # Conversation models
  "Conversation",
  "ConversationMessage",
  "ToolCall",
  "ToolResult",
+ # Task models
  "TaskList",
  "Task",
- "TaskState",
+ # Knowledge models
+ "Fact",
+ "Summary",
+ "Embedding",
+ # Audit models
+ "AuditEntry",
+ "ErrorRecord",
+ "PerformanceMetric",
  # File implementations
  "FileMemoryStore",
  "FileConversationStore",