agentic-programming 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentic/__init__.py +48 -0
- agentic/apps/__init__.py +1 -0
- agentic/apps/mini_lesson.py +47 -0
- agentic/cli.py +319 -0
- agentic/context.py +574 -0
- agentic/function.py +232 -0
- agentic/functions/__init__.py +2 -0
- agentic/functions/extract_domain.py +19 -0
- agentic/functions/sentiment.py +17 -0
- agentic/functions/word_count.py +14 -0
- agentic/mcp/__init__.py +1 -0
- agentic/mcp/__main__.py +4 -0
- agentic/mcp/server.py +189 -0
- agentic/meta_functions/__init__.py +17 -0
- agentic/meta_functions/_helpers.py +265 -0
- agentic/meta_functions/create.py +108 -0
- agentic/meta_functions/create_app.py +136 -0
- agentic/meta_functions/create_skill.py +62 -0
- agentic/meta_functions/fix.py +109 -0
- agentic/providers/__init__.py +169 -0
- agentic/providers/anthropic.py +234 -0
- agentic/providers/claude_code.py +327 -0
- agentic/providers/codex.py +275 -0
- agentic/providers/gemini.py +211 -0
- agentic/providers/gemini_cli.py +165 -0
- agentic/providers/openai.py +249 -0
- agentic/runtime.py +232 -0
- agentic_programming-0.4.0.dist-info/LICENSE +21 -0
- agentic_programming-0.4.0.dist-info/METADATA +373 -0
- agentic_programming-0.4.0.dist-info/RECORD +33 -0
- agentic_programming-0.4.0.dist-info/WHEEL +5 -0
- agentic_programming-0.4.0.dist-info/entry_points.txt +2 -0
- agentic_programming-0.4.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,169 @@
|
|
|
1
|
+
"""
|
|
2
|
+
agentic.providers — Built-in Runtime implementations for popular LLM providers.
|
|
3
|
+
|
|
4
|
+
Each provider is an optional dependency. Import will give a clear error
|
|
5
|
+
if the required SDK is not installed.
|
|
6
|
+
|
|
7
|
+
Available providers:
|
|
8
|
+
AnthropicRuntime — Anthropic Claude API (text + image, prompt caching)
|
|
9
|
+
OpenAIRuntime — OpenAI GPT API (text + image, response_format)
|
|
10
|
+
GeminiRuntime — Google Gemini API (text + image)
|
|
11
|
+
ClaudeCodeRuntime — Claude Code CLI (no API key, uses subscription)
|
|
12
|
+
CodexRuntime — OpenAI Codex CLI (no API key in harness, uses codex auth)
|
|
13
|
+
GeminiCLIRuntime — Gemini CLI (no API key, uses Google account)
|
|
14
|
+
|
|
15
|
+
Usage:
|
|
16
|
+
from agentic.providers import AnthropicRuntime
|
|
17
|
+
rt = AnthropicRuntime(api_key="sk-...", model="claude-sonnet-4-20250514")
|
|
18
|
+
|
|
19
|
+
from agentic.providers import OpenAIRuntime
|
|
20
|
+
rt = OpenAIRuntime(api_key="sk-...", model="gpt-4o")
|
|
21
|
+
|
|
22
|
+
from agentic.providers import GeminiRuntime
|
|
23
|
+
rt = GeminiRuntime(api_key="...", model="gemini-2.5-flash")
|
|
24
|
+
|
|
25
|
+
from agentic.providers import CodexRuntime
|
|
26
|
+
rt = CodexRuntime(model="o4-mini")
|
|
27
|
+
|
|
28
|
+
Auto-detection:
|
|
29
|
+
from agentic.providers import detect_provider, create_runtime
|
|
30
|
+
|
|
31
|
+
provider, model = detect_provider() # auto-detect best available
|
|
32
|
+
rt = create_runtime() # create runtime with auto-detection
|
|
33
|
+
rt = create_runtime(provider="anthropic", model="claude-sonnet-4-20250514")
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
import os
import shutil
from typing import Optional
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
# -- Provider registry -------------------------------------------------------
|
|
41
|
+
|
|
42
|
+
# Maps provider name -> (class_name, module_path, default_model).
# Order mirrors detection priority in detect_provider(): CLI providers first
# (no API key, subscription/local auth), then API-key providers.
# Keep this table and detect_provider() in sync.
PROVIDERS = {
    "claude-code": ("ClaudeCodeRuntime", "agentic.providers.claude_code", "sonnet"),
    "codex": ("CodexRuntime", "agentic.providers.codex", "o4-mini"),
    "gemini-cli": ("GeminiCLIRuntime", "agentic.providers.gemini_cli", "default"),
    "anthropic": ("AnthropicRuntime", "agentic.providers.anthropic", "claude-sonnet-4-20250514"),
    "openai": ("OpenAIRuntime", "agentic.providers.openai", "gpt-4o"),
    "gemini": ("GeminiRuntime", "agentic.providers.gemini", "gemini-2.5-flash"),
}
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def detect_provider() -> tuple[str, str]:
    """Auto-detect the best available LLM provider.

    Detection priority (CLI-first, then API keys):
      1. Claude Code CLI (`claude` in PATH) — subscription, no per-token cost
      2. Codex CLI (`codex` in PATH) — uses codex auth
      3. Gemini CLI (`gemini` in PATH) — uses Google account
      4. Anthropic API (ANTHROPIC_API_KEY set) — pay per token
      5. OpenAI API (OPENAI_API_KEY set) — pay per token
      6. Gemini API (GOOGLE_API_KEY set) — pay per token

    Returns:
        (provider_name, default_model) — e.g. ("claude-code", "sonnet")

    Raises:
        RuntimeError: if no provider is found.
    """
    # CLI providers (no API key needed) — checked first so a local
    # subscription/auth wins over a possibly-set API key.
    if shutil.which("claude"):
        return "claude-code", "sonnet"
    if shutil.which("codex"):
        return "codex", "o4-mini"
    if shutil.which("gemini"):
        return "gemini-cli", "default"

    # API providers (need keys)
    if os.environ.get("ANTHROPIC_API_KEY"):
        return "anthropic", "claude-sonnet-4-20250514"
    if os.environ.get("OPENAI_API_KEY"):
        return "openai", "gpt-4o"
    if os.environ.get("GOOGLE_API_KEY"):
        return "gemini", "gemini-2.5-flash"

    # Fix: the Gemini CLI lives under the @google npm scope, not @anthropic-ai
    # (that was a copy-paste slip from the Claude Code line).
    raise RuntimeError(
        "No LLM provider found. Set up one of the following:\n"
        "\n"
        " CLI providers (no API key needed):\n"
        " 1. Claude Code CLI: npm install -g @anthropic-ai/claude-code && claude login\n"
        " 2. Codex CLI: npm install -g @openai/codex && codex auth\n"
        " 3. Gemini CLI: npm install -g @google/gemini-cli\n"
        "\n"
        " API providers (set environment variable):\n"
        " 4. Anthropic: export ANTHROPIC_API_KEY=sk-ant-...\n"
        " 5. OpenAI: export OPENAI_API_KEY=sk-...\n"
        " 6. Gemini: export GOOGLE_API_KEY=...\n"
    )
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def create_runtime(provider: Optional[str] = None, model: Optional[str] = None, **kwargs):
    """Create a Runtime instance with auto-detection or explicit provider.

    Args:
        provider: Provider name (e.g. "anthropic", "claude-code", "openai").
            If None, auto-detects the best available provider.
        model: Model name override. If None, the provider's default model is used.
        **kwargs: Forwarded to the provider Runtime constructor.

    Returns:
        A Runtime instance ready to use.

    Raises:
        ValueError: if an explicit `provider` is not in PROVIDERS.
        RuntimeError: if `provider` is None and no provider can be detected.
    """
    # Local import keeps module import time minimal; only needed on creation.
    import importlib

    if provider:
        if provider not in PROVIDERS:
            available = ", ".join(sorted(PROVIDERS.keys()))
            raise ValueError(
                f"Unknown provider: {provider!r}. Available: {available}"
            )
        class_name, module_path, default_model = PROVIDERS[provider]
    else:
        provider, default_model = detect_provider()
        class_name, module_path, _ = PROVIDERS[provider]

    use_model = model or default_model

    mod = importlib.import_module(module_path)
    cls = getattr(mod, class_name)
    return cls(model=use_model, **kwargs)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
# -- Lazy imports for direct class access ------------------------------------
|
|
135
|
+
|
|
136
|
+
def __getattr__(name):
|
|
137
|
+
"""Lazy imports — only load a provider when accessed."""
|
|
138
|
+
if name == "AnthropicRuntime":
|
|
139
|
+
from agentic.providers.anthropic import AnthropicRuntime
|
|
140
|
+
return AnthropicRuntime
|
|
141
|
+
if name == "OpenAIRuntime":
|
|
142
|
+
from agentic.providers.openai import OpenAIRuntime
|
|
143
|
+
return OpenAIRuntime
|
|
144
|
+
if name == "GeminiRuntime":
|
|
145
|
+
from agentic.providers.gemini import GeminiRuntime
|
|
146
|
+
return GeminiRuntime
|
|
147
|
+
if name == "ClaudeCodeRuntime":
|
|
148
|
+
from agentic.providers.claude_code import ClaudeCodeRuntime
|
|
149
|
+
return ClaudeCodeRuntime
|
|
150
|
+
if name == "CodexRuntime":
|
|
151
|
+
from agentic.providers.codex import CodexRuntime
|
|
152
|
+
return CodexRuntime
|
|
153
|
+
if name == "GeminiCLIRuntime":
|
|
154
|
+
from agentic.providers.gemini_cli import GeminiCLIRuntime
|
|
155
|
+
return GeminiCLIRuntime
|
|
156
|
+
raise AttributeError(f"module 'agentic.providers' has no attribute {name!r}")
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
# Public API. The Runtime classes listed here are not imported eagerly;
# they are resolved on demand by the module-level __getattr__ above.
__all__ = [
    "PROVIDERS",
    "detect_provider",
    "create_runtime",
    "AnthropicRuntime",
    "OpenAIRuntime",
    "GeminiRuntime",
    "ClaudeCodeRuntime",
    "CodexRuntime",
    "GeminiCLIRuntime",
]
|
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
"""
|
|
2
|
+
AnthropicRuntime — Runtime subclass for Anthropic Claude API.
|
|
3
|
+
|
|
4
|
+
Supports:
|
|
5
|
+
- Text and image content blocks
|
|
6
|
+
- PDF/document content blocks (Anthropic document type)
|
|
7
|
+
- Prompt caching via cache_control
|
|
8
|
+
- System prompts
|
|
9
|
+
- Max tokens configuration
|
|
10
|
+
|
|
11
|
+
Requires: pip install anthropic
|
|
12
|
+
|
|
13
|
+
Usage:
|
|
14
|
+
from agentic.providers import AnthropicRuntime
|
|
15
|
+
|
|
16
|
+
rt = AnthropicRuntime(api_key="sk-...", model="claude-sonnet-4-20250514")
|
|
17
|
+
|
|
18
|
+
@agentic_function
|
|
19
|
+
def analyze(task):
|
|
20
|
+
'''Analyze the given task.'''
|
|
21
|
+
return rt.exec(content=[
|
|
22
|
+
{"type": "text", "text": f"Analyze: {task}"},
|
|
23
|
+
])
|
|
24
|
+
"""
|
|
25
|
+
|
|
26
|
+
from __future__ import annotations
|
|
27
|
+
|
|
28
|
+
import base64
|
|
29
|
+
import mimetypes
|
|
30
|
+
import os
|
|
31
|
+
from typing import Optional
|
|
32
|
+
|
|
33
|
+
from agentic.runtime import Runtime
|
|
34
|
+
|
|
35
|
+
# Fail fast at import time with an actionable message when the optional
# 'anthropic' SDK is not installed.
try:
    import anthropic
except ImportError:
    raise ImportError(
        "AnthropicRuntime requires the 'anthropic' package.\n"
        "Install it with: pip install anthropic"
    )
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
class AnthropicRuntime(Runtime):
    """
    Runtime implementation for Anthropic Claude.

    Args:
        api_key: Anthropic API key. If None, reads from ANTHROPIC_API_KEY env var.
        model: Default model name (e.g. "claude-sonnet-4-20250514").
        max_tokens: Maximum tokens in the response (default: 4096).
        system: System prompt. If provided, sent as the system parameter.
        cache_system: Whether to cache the system prompt (default: True).
            Adds cache_control to the system block for prompt caching.
        max_retries: Maximum number of exec() attempts before raising.
        **client_kwargs: Additional kwargs passed to anthropic.Anthropic().
    """

    def __init__(
        self,
        api_key: Optional[str] = None,
        model: str = "claude-sonnet-4-20250514",
        max_tokens: int = 4096,
        system: Optional[str] = None,
        cache_system: bool = True,
        max_retries: int = 2,
        **client_kwargs,
    ):
        super().__init__(model=model, max_retries=max_retries)
        self.max_tokens = max_tokens
        self.system = system
        self.cache_system = cache_system

        # Resolve the key eagerly so a misconfiguration fails at construction
        # time, not on the first API call.
        api_key = api_key or os.environ.get("ANTHROPIC_API_KEY")
        if not api_key:
            raise ValueError(
                "Anthropic API key is required. Pass api_key= or set ANTHROPIC_API_KEY env var."
            )
        self.client = anthropic.Anthropic(api_key=api_key, **client_kwargs)

    def _call(
        self,
        content: list[dict],
        model: str = "default",
        response_format: Optional[dict] = None,
    ) -> str:
        """
        Call Anthropic Claude API.

        Content blocks are converted to Anthropic's format:
            {"type": "text", "text": "..."}
                → {"type": "text", "text": "..."}

            {"type": "image", "path": "screenshot.png"}
                → {"type": "image", "source": {"type": "base64", ...}}

            {"type": "image", "data": "<base64>", "media_type": "image/png"}
                → {"type": "image", "source": {"type": "base64", ...}}

        If cache_control is set on a content block, it's passed through.

        Note: `response_format` is accepted for interface compatibility but is
        not forwarded to the API by this implementation.
        """
        # Convert each generic block; _convert_block returns None for
        # unsupported blocks (audio/video), which are dropped.
        messages_content = []
        for block in content:
            converted = self._convert_block(block)
            if converted:
                messages_content.append(converted)

        # Enable prompt caching on the last content block.
        # NOTE(review): this unconditionally overwrites any cache_control the
        # caller set on the final block — confirm that is intended.
        if messages_content:
            messages_content[-1]["cache_control"] = {"type": "ephemeral"}

        kwargs = {
            # "default" is a sentinel meaning "use the runtime's configured model".
            "model": model if model != "default" else self.model,
            "max_tokens": self.max_tokens,
            "messages": [{"role": "user", "content": messages_content}],
        }

        # System prompt with optional caching: caching requires the list-of-
        # blocks form so cache_control can be attached to the system block.
        if self.system:
            if self.cache_system:
                kwargs["system"] = [
                    {
                        "type": "text",
                        "text": self.system,
                        "cache_control": {"type": "ephemeral"},
                    }
                ]
            else:
                kwargs["system"] = self.system

        response = self.client.messages.create(**kwargs)
        # Assumes the first response block is text — TODO confirm this holds
        # for all responses (e.g. tool-use) this runtime can receive.
        return response.content[0].text

    def _convert_block(self, block: dict) -> Optional[dict]:
        """Convert a generic content block to Anthropic format.

        Returns None for blocks that cannot be represented (audio/video, or
        an unknown type with no "text" fallback); the caller drops those.
        """
        # Blocks without an explicit type are treated as text.
        block_type = block.get("type", "text")

        if block_type == "text":
            result = {"type": "text", "text": block["text"]}
            # Preserve caller-supplied prompt-caching markers.
            if "cache_control" in block:
                result["cache_control"] = block["cache_control"]
            return result

        if block_type == "image":
            # Image from base64 data (already encoded by the caller).
            if "data" in block:
                media_type = block.get("media_type", "image/png")
                return {
                    "type": "image",
                    "source": {
                        "type": "base64",
                        "media_type": media_type,
                        "data": block["data"],
                    },
                }

            # Image from file path — read and base64-encode here.
            if "path" in block:
                path = block["path"]
                media_type = mimetypes.guess_type(path)[0] or "image/png"
                with open(path, "rb") as f:
                    data = base64.b64encode(f.read()).decode("utf-8")
                return {
                    "type": "image",
                    "source": {
                        "type": "base64",
                        "media_type": media_type,
                        "data": data,
                    },
                }

            # Image from URL — Anthropic fetches it server-side.
            if "url" in block:
                return {
                    "type": "image",
                    "source": {
                        "type": "url",
                        "url": block["url"],
                    },
                }

        if block_type == "file":
            # PDF/document support via Anthropic's document content type.
            mime_type = block.get("mime_type", "application/pdf")

            if "data" in block:
                return {
                    "type": "document",
                    "source": {
                        "type": "base64",
                        "media_type": mime_type,
                        "data": block["data"],
                    },
                }

            if "path" in block:
                path = block["path"]
                # Prefer the MIME type sniffed from the filename; fall back to
                # the caller-supplied (or default PDF) type.
                detected_mime = mimetypes.guess_type(path)[0] or mime_type
                with open(path, "rb") as f:
                    data = base64.b64encode(f.read()).decode("utf-8")
                return {
                    "type": "document",
                    "source": {
                        "type": "base64",
                        "media_type": detected_mime,
                        "data": data,
                    },
                }

        if block_type == "audio":
            import warnings
            warnings.warn(
                "AnthropicRuntime does not support audio content blocks. "
                "Audio block will be skipped. Consider using GeminiRuntime or OpenAIRuntime for audio.",
                UserWarning,
                stacklevel=3,
            )
            return None

        if block_type == "video":
            import warnings
            warnings.warn(
                "AnthropicRuntime does not support video content blocks. "
                "Video block will be skipped. Consider using GeminiRuntime for video.",
                UserWarning,
                stacklevel=3,
            )
            return None

        # Unknown block type — pass text representation if one exists.
        if "text" in block:
            return {"type": "text", "text": block["text"]}

        return None