pythonclaw 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pythonclaw/__init__.py +17 -0
- pythonclaw/__main__.py +6 -0
- pythonclaw/channels/discord_bot.py +231 -0
- pythonclaw/channels/telegram_bot.py +236 -0
- pythonclaw/config.py +190 -0
- pythonclaw/core/__init__.py +25 -0
- pythonclaw/core/agent.py +773 -0
- pythonclaw/core/compaction.py +220 -0
- pythonclaw/core/knowledge/rag.py +93 -0
- pythonclaw/core/llm/anthropic_client.py +107 -0
- pythonclaw/core/llm/base.py +26 -0
- pythonclaw/core/llm/gemini_client.py +139 -0
- pythonclaw/core/llm/openai_compatible.py +39 -0
- pythonclaw/core/llm/response.py +57 -0
- pythonclaw/core/memory/manager.py +120 -0
- pythonclaw/core/memory/storage.py +164 -0
- pythonclaw/core/persistent_agent.py +103 -0
- pythonclaw/core/retrieval/__init__.py +6 -0
- pythonclaw/core/retrieval/chunker.py +78 -0
- pythonclaw/core/retrieval/dense.py +152 -0
- pythonclaw/core/retrieval/fusion.py +51 -0
- pythonclaw/core/retrieval/reranker.py +112 -0
- pythonclaw/core/retrieval/retriever.py +166 -0
- pythonclaw/core/retrieval/sparse.py +69 -0
- pythonclaw/core/session_store.py +269 -0
- pythonclaw/core/skill_loader.py +322 -0
- pythonclaw/core/skillhub.py +290 -0
- pythonclaw/core/tools.py +622 -0
- pythonclaw/core/utils.py +64 -0
- pythonclaw/daemon.py +221 -0
- pythonclaw/init.py +61 -0
- pythonclaw/main.py +489 -0
- pythonclaw/onboard.py +290 -0
- pythonclaw/scheduler/cron.py +310 -0
- pythonclaw/scheduler/heartbeat.py +178 -0
- pythonclaw/server.py +145 -0
- pythonclaw/session_manager.py +104 -0
- pythonclaw/templates/persona/demo_persona.md +2 -0
- pythonclaw/templates/skills/communication/CATEGORY.md +4 -0
- pythonclaw/templates/skills/communication/email/SKILL.md +54 -0
- pythonclaw/templates/skills/communication/email/__pycache__/send_email.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/communication/email/send_email.py +88 -0
- pythonclaw/templates/skills/data/CATEGORY.md +4 -0
- pythonclaw/templates/skills/data/csv_analyzer/SKILL.md +51 -0
- pythonclaw/templates/skills/data/csv_analyzer/__pycache__/analyze.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/csv_analyzer/analyze.py +138 -0
- pythonclaw/templates/skills/data/finance/SKILL.md +41 -0
- pythonclaw/templates/skills/data/finance/__pycache__/fetch_quote.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/finance/fetch_quote.py +118 -0
- pythonclaw/templates/skills/data/news/SKILL.md +39 -0
- pythonclaw/templates/skills/data/news/__pycache__/search_news.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/news/search_news.py +57 -0
- pythonclaw/templates/skills/data/pdf_reader/SKILL.md +40 -0
- pythonclaw/templates/skills/data/pdf_reader/__pycache__/read_pdf.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/pdf_reader/read_pdf.py +113 -0
- pythonclaw/templates/skills/data/scraper/SKILL.md +39 -0
- pythonclaw/templates/skills/data/scraper/__pycache__/scrape.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/scraper/scrape.py +92 -0
- pythonclaw/templates/skills/data/weather/SKILL.md +42 -0
- pythonclaw/templates/skills/data/weather/__pycache__/weather.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/weather/weather.py +142 -0
- pythonclaw/templates/skills/data/youtube/SKILL.md +43 -0
- pythonclaw/templates/skills/data/youtube/__pycache__/youtube_info.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/data/youtube/youtube_info.py +167 -0
- pythonclaw/templates/skills/dev/CATEGORY.md +4 -0
- pythonclaw/templates/skills/dev/code_runner/SKILL.md +46 -0
- pythonclaw/templates/skills/dev/code_runner/__pycache__/run_code.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/dev/code_runner/run_code.py +117 -0
- pythonclaw/templates/skills/dev/github/SKILL.md +52 -0
- pythonclaw/templates/skills/dev/github/__pycache__/gh.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/dev/github/gh.py +165 -0
- pythonclaw/templates/skills/dev/http_request/SKILL.md +40 -0
- pythonclaw/templates/skills/dev/http_request/__pycache__/request.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/dev/http_request/request.py +90 -0
- pythonclaw/templates/skills/google/CATEGORY.md +4 -0
- pythonclaw/templates/skills/google/workspace/SKILL.md +98 -0
- pythonclaw/templates/skills/google/workspace/check_setup.sh +52 -0
- pythonclaw/templates/skills/meta/CATEGORY.md +4 -0
- pythonclaw/templates/skills/meta/skill_creator/SKILL.md +151 -0
- pythonclaw/templates/skills/system/CATEGORY.md +4 -0
- pythonclaw/templates/skills/system/change_persona/SKILL.md +41 -0
- pythonclaw/templates/skills/system/change_setting/SKILL.md +65 -0
- pythonclaw/templates/skills/system/change_setting/__pycache__/update_config.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/system/change_setting/update_config.py +129 -0
- pythonclaw/templates/skills/system/change_soul/SKILL.md +41 -0
- pythonclaw/templates/skills/system/onboarding/SKILL.md +63 -0
- pythonclaw/templates/skills/system/onboarding/__pycache__/write_identity.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/system/onboarding/write_identity.py +218 -0
- pythonclaw/templates/skills/system/random/SKILL.md +33 -0
- pythonclaw/templates/skills/system/random/__pycache__/random_util.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/system/random/random_util.py +45 -0
- pythonclaw/templates/skills/system/time/SKILL.md +33 -0
- pythonclaw/templates/skills/system/time/__pycache__/time_util.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/system/time/time_util.py +81 -0
- pythonclaw/templates/skills/text/CATEGORY.md +4 -0
- pythonclaw/templates/skills/text/translator/SKILL.md +47 -0
- pythonclaw/templates/skills/text/translator/__pycache__/translate.cpython-311.pyc +0 -0
- pythonclaw/templates/skills/text/translator/translate.py +66 -0
- pythonclaw/templates/skills/web/CATEGORY.md +4 -0
- pythonclaw/templates/skills/web/tavily/SKILL.md +61 -0
- pythonclaw/templates/soul/SOUL.md +54 -0
- pythonclaw/web/__init__.py +1 -0
- pythonclaw/web/app.py +585 -0
- pythonclaw/web/static/favicon.png +0 -0
- pythonclaw/web/static/index.html +1318 -0
- pythonclaw/web/static/logo.png +0 -0
- pythonclaw-0.2.0.dist-info/METADATA +410 -0
- pythonclaw-0.2.0.dist-info/RECORD +112 -0
- pythonclaw-0.2.0.dist-info/WHEEL +5 -0
- pythonclaw-0.2.0.dist-info/entry_points.txt +2 -0
- pythonclaw-0.2.0.dist-info/licenses/LICENSE +21 -0
- pythonclaw-0.2.0.dist-info/top_level.txt +1 -0
pythonclaw/onboard.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Interactive onboarding wizard for PythonClaw.
|
|
3
|
+
|
|
4
|
+
Guides a new user through LLM provider selection, API key entry,
|
|
5
|
+
and optional service key configuration. Writes pythonclaw.json.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations
|
|
9
|
+
|
|
10
|
+
import getpass
|
|
11
|
+
import json
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
|
|
14
|
+
from . import config
|
|
15
|
+
|
|
16
|
+
# ── ANSI helpers (no external deps) ──────────────────────────────────────────
|
|
17
|
+
|
|
18
|
+
# Raw ANSI SGR escape sequences used to colorize the wizard's terminal output.
# Emitted unconditionally — on terminals without ANSI support (e.g. plain
# Windows consoles) they will show as literal escape characters.
_BOLD = "\033[1m"
_DIM = "\033[2m"
_GREEN = "\033[32m"
_CYAN = "\033[36m"
_YELLOW = "\033[33m"
_RED = "\033[31m"
_RESET = "\033[0m"  # clears all attributes set by the codes above
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _c(text: str, color: str) -> str:
    """Wrap *text* in the given ANSI escape code and reset attributes after it."""
    return "".join((color, text, _RESET))
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
# ── Provider definitions ─────────────────────────────────────────────────────
|
|
32
|
+
|
|
33
|
+
# Supported LLM providers, in the order they are presented by the wizard.
# Each entry:
#   key           — identifier stored in the config under llm.provider
#   name          — human-readable label shown in the selection menu
#   default_model — model written to the config when the user has none set
#   default_base  — OpenAI-compatible base URL, or None for providers that
#                   use their own native SDK client (Claude, Gemini)
#   env           — associated environment variable name (not read in this
#                   module; presumably used elsewhere — confirm)
PROVIDERS = [
    {
        "key": "deepseek",
        "name": "DeepSeek",
        "default_model": "deepseek-chat",
        "default_base": "https://api.deepseek.com/v1",
        "env": "DEEPSEEK_API_KEY",
    },
    {
        "key": "grok",
        "name": "Grok (xAI)",
        "default_model": "grok-3",
        "default_base": "https://api.x.ai/v1",
        "env": "GROK_API_KEY",
    },
    {
        "key": "claude",
        "name": "Claude (Anthropic)",
        "default_model": "claude-sonnet-4-20250514",
        "default_base": None,
        "env": "ANTHROPIC_API_KEY",
    },
    {
        "key": "gemini",
        "name": "Gemini (Google)",
        "default_model": "gemini-2.0-flash",
        "default_base": None,
        "env": "GEMINI_API_KEY",
    },
    {
        "key": "kimi",
        "name": "Kimi (Moonshot)",
        "default_model": "moonshot-v1-128k",
        "default_base": "https://api.moonshot.cn/v1",
        "env": "KIMI_API_KEY",
    },
    {
        "key": "glm",
        "name": "GLM (Zhipu / ChatGLM)",
        "default_model": "glm-4-flash",
        "default_base": "https://open.bigmodel.cn/api/paas/v4/",
        "env": "GLM_API_KEY",
    },
]
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
# ── Core logic ───────────────────────────────────────────────────────────────
|
|
80
|
+
|
|
81
|
+
def run_onboard(config_path: str | None = None) -> Path:
    """Run the interactive onboarding wizard.

    Steps: load any existing config (so re-running keeps prior answers),
    pick an LLM provider, collect its API key, ask for optional service
    keys, validate the key with a tiny test call, then write everything
    to pythonclaw.json.

    Args:
        config_path: Explicit config file path; defaults to
            ./pythonclaw.json when None.

    Returns:
        Path to the saved config file.
    """
    print()
    print(_c(" ╔══════════════════════════════════════╗", _CYAN))
    print(_c(" ║ PythonClaw — Setup Wizard ║", _CYAN))
    print(_c(" ╚══════════════════════════════════════╝", _CYAN))
    print()

    # Load existing config if present
    cfg = _load_existing(config_path)

    # 1. Choose LLM provider
    provider = _choose_provider(cfg)

    # 2. Enter API key (hidden input; Enter keeps an existing key)
    api_key = _get_api_key(provider, cfg)

    # 3. Update config — only apiKey is overwritten; a user-customised
    #    model/baseUrl is preserved (setdefault).
    prov = provider["key"]
    cfg.setdefault("llm", {})
    cfg["llm"]["provider"] = prov
    cfg["llm"].setdefault(prov, {})
    cfg["llm"][prov]["apiKey"] = api_key
    cfg["llm"][prov].setdefault("model", provider["default_model"])
    if provider["default_base"]:
        cfg["llm"][prov].setdefault("baseUrl", provider["default_base"])

    # 4. Optional keys (Tavily / Deepgram / SkillHub)
    _optional_keys(cfg)

    # 5. Validate — a failure is reported but does not abort setup
    _validate_key(cfg, provider)

    # 6. Save and make the new file the active config
    out_path = _save_config(cfg, config_path)

    print()
    print(_c(" ✔ Setup complete!", _GREEN))
    print(f" Config saved to: {_c(str(out_path), _BOLD)}")
    print()
    return out_path
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _load_existing(config_path: str | None) -> dict:
    """Load existing config or return empty dict.

    Any failure (missing file, parse error, etc.) is treated as "no
    existing config" so the wizard can start from scratch.
    """
    try:
        config.load(config_path)
        return config.as_dict()
    except Exception:
        # Broad on purpose: onboarding must proceed even when the
        # current config is absent or broken.
        return {}
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def _choose_provider(cfg: dict) -> dict:
    """Interactively select an LLM provider from PROVIDERS.

    The provider already named in *cfg* (if any) is offered as the
    default: pressing Enter keeps it.  Loops until a valid choice is
    made.

    Fixes vs. original:
    - The default hint was recomputed on every loop iteration although
      it is loop-invariant; it is now computed once before the loop.
    - If the configured provider key is unknown (not in PROVIDERS),
      pressing Enter used to raise StopIteration from an unguarded
      ``next()``; it now simply re-prompts.

    Returns:
        The chosen provider definition dict from PROVIDERS.
    """
    current = cfg.get("llm", {}).get("provider", "")
    print(_c(" Choose your LLM provider:", _BOLD))
    print()
    for i, p in enumerate(PROVIDERS, 1):
        marker = _c(" (current)", _DIM) if p["key"] == current else ""
        print(f" {_c(str(i), _CYAN)}. {p['name']}{marker}")
    print()

    # Resolve the configured provider once (loop-invariant).
    default_idx = next((i for i, p in enumerate(PROVIDERS) if p["key"] == current), None)
    default_hint = f" [{default_idx + 1}]" if default_idx is not None else ""

    while True:
        choice = input(f" Enter number (1-{len(PROVIDERS)}){default_hint}: ").strip()
        if not choice and default_idx is not None:
            return PROVIDERS[default_idx]
        try:
            n = int(choice)
            if 1 <= n <= len(PROVIDERS):
                selected = PROVIDERS[n - 1]
                print(f" → {_c(selected['name'], _GREEN)}")
                print()
                return selected
        except ValueError:
            pass
        print(_c(" Invalid choice, try again.", _RED))
|
|
162
|
+
|
|
163
|
+
|
|
164
|
+
def _get_api_key(provider: dict, cfg: dict) -> str:
    """Prompt for the provider's API key (input hidden via getpass).

    Pressing Enter keeps an existing key from the config, if any.
    Re-prompts until a non-empty key is entered.

    Fixes vs. original:
    - Retry used unbounded self-recursion (``return _get_api_key(...)``),
      which could in principle exhaust the stack; replaced with a loop.
    - ``bool(existing) and existing != ""`` was redundant — simplified.

    Returns:
        The API key to store (either newly entered or the kept existing one).
    """
    existing = cfg.get("llm", {}).get(provider["key"], {}).get("apiKey", "")
    has_existing = bool(existing)

    hint = ""
    if has_existing:
        masked = existing[:4] + "****" + existing[-4:] if len(existing) > 8 else "****"
        hint = f" (current: {masked}, press Enter to keep)"

    while True:
        # The header is re-printed on every attempt, matching the original
        # recursive behavior.
        print(f" {provider['name']} API Key{hint}")
        key = getpass.getpass(" API Key: ").strip()

        if not key and has_existing:
            print(" → Keeping existing key")
            return existing
        if key:
            break
        print(_c(" API key is required.", _RED))

    print(f" → Key set ({key[:4]}****)")
    print()
    return key
|
|
186
|
+
|
|
187
|
+
|
|
188
|
+
def _optional_keys(cfg: dict) -> None:
    """Prompt for optional third-party service API keys; Enter skips each.

    Only asks for services whose key is not already present in *cfg*.
    Mutates *cfg* in place.

    Fix vs. original: the three near-identical copy-pasted stanzas
    (Tavily / Deepgram / SkillHub) are collapsed into one data-driven
    loop with byte-identical prompts and confirmation messages.
    """
    print(_c(" Optional services (press Enter to skip):", _DIM))
    print()

    # (config section, input prompt, confirmation message)
    services = (
        ("tavily", " Tavily API Key (web search): ", " → Tavily key set"),
        ("deepgram", " Deepgram API Key (voice input): ", " → Deepgram key set"),
        ("skillhub", " SkillHub API Key (marketplace): ", " → SkillHub key set"),
    )
    for section, prompt, confirmation in services:
        if cfg.get(section, {}).get("apiKey", ""):
            continue  # already configured — don't re-prompt
        value = input(prompt).strip()
        if value:
            cfg.setdefault(section, {})["apiKey"] = value
            print(confirmation)

    print()
|
|
217
|
+
|
|
218
|
+
|
|
219
|
+
def _validate_key(cfg: dict, provider: dict) -> None:
    """Make a quick test call to validate the API key.

    Sends a minimal "hi" completion (max_tokens=5) through the matching
    provider client.  Failures are printed but never raised — the wizard
    always continues, and the user can fix the key later.
    """
    print(f" Validating {provider['name']} API key...", end=" ", flush=True)

    prov_key = provider["key"]
    api_key = cfg["llm"][prov_key]["apiKey"]

    try:
        # Imports are local so the heavy SDK clients are only loaded for
        # the provider actually being validated.
        if prov_key in ("deepseek", "grok", "kimi", "glm"):
            # All four speak the OpenAI-compatible chat API.
            from .core.llm.openai_compatible import OpenAICompatibleProvider
            base_url = cfg["llm"][prov_key].get("baseUrl", provider["default_base"])
            model = cfg["llm"][prov_key].get("model", provider["default_model"])
            p = OpenAICompatibleProvider(api_key=api_key, base_url=base_url, model_name=model)
            p.chat([{"role": "user", "content": "hi"}], max_tokens=5)
        elif prov_key == "claude":
            from .core.llm.anthropic_client import AnthropicProvider
            p = AnthropicProvider(api_key=api_key)
            p.chat([{"role": "user", "content": "hi"}], max_tokens=5)
        elif prov_key == "gemini":
            from .core.llm.gemini_client import GeminiProvider
            p = GeminiProvider(api_key=api_key)
            p.chat([{"role": "user", "content": "hi"}], max_tokens=5)
        else:
            # Unreachable with the current PROVIDERS list, but kept as a
            # safe fallback if a new provider type is added.
            print(_c("skipped (unknown provider type)", _YELLOW))
            return

        print(_c("✔ Valid!", _GREEN))
    except Exception as exc:
        # Truncate long SDK error messages to keep the wizard readable.
        err_str = str(exc)
        if len(err_str) > 100:
            err_str = err_str[:100] + "..."
        print(_c(f"✘ {err_str}", _RED))
        print(_c(" You can fix this later in pythonclaw.json or the web dashboard.", _DIM))
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
def _save_config(cfg: dict, config_path: str | None) -> Path:
    """Persist *cfg* as pretty-printed JSON and reload the active config.

    Missing top-level sections are filled with defaults first (existing
    values are never overwritten).  Returns the path written.
    """
    out = Path(config_path) if config_path else Path.cwd() / "pythonclaw.json"

    # Guarantee every expected section exists (same order as before, so
    # the serialized key order is unchanged).
    cfg.setdefault(
        "channels",
        {
            "telegram": {"token": "", "allowedUsers": []},
            "discord": {"token": "", "allowedUsers": [], "allowedChannels": []},
        },
    )
    for service in ("tavily", "deepgram", "skillhub"):
        cfg.setdefault(service, {}).setdefault("apiKey", "")
    cfg.setdefault("heartbeat", {"intervalSec": 60, "alertChatId": None})
    cfg.setdefault("memory", {"dir": None})
    cfg.setdefault("web", {"host": "0.0.0.0", "port": 7788})
    cfg.setdefault("skills", {})
    cfg.setdefault("agent", {"autoCompactThreshold": 0, "verbose": True})

    out.parent.mkdir(parents=True, exist_ok=True)
    payload = json.dumps(cfg, indent=2, ensure_ascii=False) + "\n"
    out.write_text(payload, encoding="utf-8")
    # Make the freshly saved file the active in-process config.
    config.load(str(out), force=True)
    return out
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def needs_onboard(config_path: str | None = None) -> bool:
    """Check if onboarding is needed (no config or no API key).

    True when the config cannot be loaded, names no provider, or the
    named provider has no API key stored.
    """
    try:
        config.load(config_path)
    except Exception:
        return True  # unreadable or absent config → must onboard

    provider = config.get_str("llm", "provider", default="")
    api_key = config.get_str("llm", provider, "apiKey", default="") if provider else ""
    return not (provider and api_key)
|
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Cron-based LLM job scheduler for pythonclaw.
|
|
3
|
+
|
|
4
|
+
Two sources of jobs
|
|
5
|
+
-------------------
|
|
6
|
+
1. Static jobs — defined in context/cron/jobs.yaml (human-configured)
|
|
7
|
+
2. Dynamic jobs — added at runtime by the Agent via cron_add / cron_remove
|
|
8
|
+
tool calls; persisted to context/cron/dynamic_jobs.json
|
|
9
|
+
|
|
10
|
+
Session isolation
|
|
11
|
+
-----------------
|
|
12
|
+
Each job gets its own persistent session via the shared SessionManager:
|
|
13
|
+
session_id = "cron:{job_id}"
|
|
14
|
+
|
|
15
|
+
This means:
|
|
16
|
+
- Jobs don't share context with each other or with user conversations.
|
|
17
|
+
- The same job accumulates history across multiple runs.
|
|
18
|
+
- Sessions can be reset via SessionManager.reset("cron:{job_id}").
|
|
19
|
+
|
|
20
|
+
Agent cron tools
|
|
21
|
+
----------------
|
|
22
|
+
Expose these to the Agent via agent.py:
|
|
23
|
+
cron_add(job_id, cron, prompt, deliver_to_chat_id=None)
|
|
24
|
+
cron_remove(job_id)
|
|
25
|
+
cron_list()
|
|
26
|
+
|
|
27
|
+
jobs.yaml format
|
|
28
|
+
----------------
|
|
29
|
+
jobs:
|
|
30
|
+
- id: daily_summary
|
|
31
|
+
cron: "0 9 * * *"
|
|
32
|
+
prompt: "Summarise my tasks and memory for today."
|
|
33
|
+
deliver_to: telegram
|
|
34
|
+
chat_id: 123456789
|
|
35
|
+
enabled: true
|
|
36
|
+
"""
|
|
37
|
+
|
|
38
|
+
from __future__ import annotations
|
|
39
|
+
|
|
40
|
+
import json
|
|
41
|
+
import logging
|
|
42
|
+
import os
|
|
43
|
+
from typing import TYPE_CHECKING
|
|
44
|
+
|
|
45
|
+
import yaml
|
|
46
|
+
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
|
47
|
+
from apscheduler.triggers.cron import CronTrigger
|
|
48
|
+
|
|
49
|
+
if TYPE_CHECKING:
|
|
50
|
+
from ..channels.telegram_bot import TelegramBot
|
|
51
|
+
from ..session_manager import SessionManager
|
|
52
|
+
|
|
53
|
+
logger = logging.getLogger(__name__)
|
|
54
|
+
|
|
55
|
+
DYNAMIC_JOBS_FILE = os.path.join("context", "cron", "dynamic_jobs.json")
|
|
56
|
+
|
|
57
|
+
DEFAULT_JOBS_PATH = os.path.join("context", "cron", "jobs.yaml")
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class CronScheduler:
    """
    Loads job definitions from YAML and schedules them with APScheduler.

    Each job runs inside its own session ("cron:{job_id}") managed by the
    shared SessionManager, keeping job context isolated and persistent.

    Two job sources: static jobs from the YAML file at *jobs_path*, and
    dynamic jobs added at runtime (persisted to DYNAMIC_JOBS_FILE).

    Fix vs. original: ``reload_jobs()`` removed ALL jobs (including
    dynamic ones) but only re-registered static jobs, silently
    unscheduling every dynamic job until the next restart.  Dynamic jobs
    are now re-registered after a reload.
    """

    def __init__(
        self,
        session_manager: "SessionManager",
        jobs_path: str = DEFAULT_JOBS_PATH,
        telegram_bot: "TelegramBot | None" = None,
    ) -> None:
        # SessionManager supplies one persistent agent per "cron:{id}" session.
        self._sm = session_manager
        self._jobs_path = jobs_path
        # Optional delivery channel; jobs with deliver_to="telegram" need it.
        self._telegram_bot = telegram_bot
        self._scheduler = AsyncIOScheduler()

    # ── YAML loading ─────────────────────────────────────────────────────────

    def _load_jobs(self) -> list[dict]:
        """Parse the static jobs YAML file; returns [] when absent/empty."""
        if not os.path.exists(self._jobs_path):
            logger.info("[CronScheduler] No jobs file found at %s — skipping.", self._jobs_path)
            return []
        with open(self._jobs_path, "r", encoding="utf-8") as f:
            data = yaml.safe_load(f) or {}
        return data.get("jobs", [])

    # ── Job execution ────────────────────────────────────────────────────────

    async def _run_job(
        self,
        job_id: str,
        prompt: str,
        deliver_to: str | None,
        chat_id: int | None,
    ) -> None:
        """Execute one job: feed *prompt* to its dedicated session's agent.

        A failing job is logged and its error text becomes the delivered
        response, so delivery still happens.
        """
        session_id = f"cron:{job_id}"
        logger.info("[CronScheduler] Running job '%s' (session='%s')", job_id, session_id)

        agent = self._sm.get_or_create(session_id)
        try:
            # NOTE(review): agent.chat appears to be a synchronous call inside
            # an async function — a long-running job would block the event
            # loop while it runs; confirm whether it should be awaited or
            # offloaded to a thread.
            response = agent.chat(prompt)
            logger.info("[CronScheduler] Job '%s' completed.", job_id)
        except Exception as exc:
            logger.exception("[CronScheduler] Job '%s' failed: %s", job_id, exc)
            response = f"[Cron job '{job_id}' failed]\n{exc}"

        if deliver_to == "telegram" and chat_id and self._telegram_bot:
            try:
                header = f"📋 Cron job: {job_id}\n\n"
                await self._telegram_bot.send_message(chat_id, header + (response or ""))
            except Exception as exc:
                logger.error(
                    "[CronScheduler] Failed to deliver job '%s' to Telegram: %s", job_id, exc
                )

    # ── Scheduler lifecycle ──────────────────────────────────────────────────

    def load_and_register_jobs(self) -> int:
        """Parse jobs.yaml and register enabled jobs with APScheduler. Returns count."""
        jobs = self._load_jobs()
        registered = 0
        for job in jobs:
            job_id = job.get("id", "unnamed")
            if not job.get("enabled", True):
                logger.info("[CronScheduler] Skipping disabled job '%s'", job_id)
                continue

            cron_expr = job.get("cron")
            prompt = job.get("prompt")
            if not cron_expr or not prompt:
                logger.warning(
                    "[CronScheduler] Job '%s' is missing 'cron' or 'prompt' — skipped.", job_id
                )
                continue

            deliver_to = job.get("deliver_to")
            chat_id = job.get("chat_id")

            # replace_existing=True lets a YAML edit + reload overwrite a job
            # with the same id instead of raising.
            trigger = _parse_cron(cron_expr)
            self._scheduler.add_job(
                self._run_job,
                trigger=trigger,
                id=job_id,
                kwargs={
                    "job_id": job_id,
                    "prompt": prompt,
                    "deliver_to": deliver_to,
                    "chat_id": chat_id,
                },
                replace_existing=True,
            )
            logger.info(
                "[CronScheduler] Registered job '%s' (session='cron:%s') cron='%s'",
                job_id, job_id, cron_expr,
            )
            registered += 1

        return registered

    def start(self) -> None:
        """Start the APScheduler background scheduler (static + dynamic jobs)."""
        static_count = self.load_and_register_jobs()
        dynamic_count = self._register_dynamic_jobs()
        total = static_count + dynamic_count
        if total == 0:
            logger.info("[CronScheduler] No jobs to schedule — scheduler will start but be idle.")
        self._scheduler.start()
        logger.info(
            "[CronScheduler] Scheduler started: %d static + %d dynamic job(s).",
            static_count, dynamic_count,
        )

    def stop(self) -> None:
        """Shut down the scheduler without waiting for running jobs."""
        if self._scheduler.running:
            self._scheduler.shutdown(wait=False)
        logger.info("[CronScheduler] Scheduler stopped.")

    def reload_jobs(self) -> int:
        """Hot-reload static jobs from the YAML file without stopping the scheduler.

        Returns the number of static jobs registered (unchanged interface).

        Fix: remove_all_jobs() also drops dynamic jobs, so they are
        re-registered from their persisted JSON afterwards.
        """
        self._scheduler.remove_all_jobs()
        static_count = self.load_and_register_jobs()
        self._register_dynamic_jobs()  # restore dynamic jobs dropped above
        return static_count

    # ── Dynamic job management (called by Agent cron tools) ──────────────────

    def _load_dynamic_jobs(self) -> dict[str, dict]:
        """Load persisted dynamic jobs from JSON. Returns {job_id: job_dict}."""
        if not os.path.exists(DYNAMIC_JOBS_FILE):
            return {}
        try:
            with open(DYNAMIC_JOBS_FILE, "r", encoding="utf-8") as f:
                return json.load(f)
        except (OSError, json.JSONDecodeError) as exc:
            # A corrupt store degrades to "no dynamic jobs" rather than crashing.
            logger.error("[CronScheduler] Failed to load dynamic jobs: %s", exc)
            return {}

    def _save_dynamic_jobs(self, jobs: dict[str, dict]) -> None:
        """Persist the dynamic job map to DYNAMIC_JOBS_FILE, creating dirs as needed."""
        os.makedirs(os.path.dirname(DYNAMIC_JOBS_FILE), exist_ok=True)
        with open(DYNAMIC_JOBS_FILE, "w", encoding="utf-8") as f:
            json.dump(jobs, f, indent=2, ensure_ascii=False)

    def _register_dynamic_jobs(self) -> int:
        """Register all persisted dynamic jobs with the scheduler. Returns count."""
        jobs = self._load_dynamic_jobs()
        registered = 0
        for job_id, job in jobs.items():
            try:
                self._scheduler.add_job(
                    self._run_job,
                    trigger=_parse_cron(job["cron"]),
                    id=job_id,
                    kwargs={
                        "job_id": job_id,
                        "prompt": job["prompt"],
                        "deliver_to": job.get("deliver_to"),
                        "chat_id": job.get("chat_id"),
                    },
                    replace_existing=True,
                )
                registered += 1
                logger.info("[CronScheduler] Restored dynamic job '%s'", job_id)
            except Exception as exc:
                # One bad persisted job must not prevent the others from loading.
                logger.error("[CronScheduler] Failed to restore dynamic job '%s': %s", job_id, exc)
        return registered

    def add_dynamic_job(
        self,
        job_id: str,
        cron_expr: str,
        prompt: str,
        deliver_to: str | None = None,
        chat_id: int | None = None,
    ) -> str:
        """
        Add a new dynamic job (called from the Agent cron_add tool).
        Persists to dynamic_jobs.json so it survives restarts.

        Returns a human-readable status string (tool-call convention:
        errors are returned as text, not raised).
        """
        try:
            trigger = _parse_cron(cron_expr)
        except ValueError as exc:
            return f"Invalid cron expression: {exc}"

        # NOTE(review): the job is scheduled before it is persisted — if the
        # save below fails, the job runs until restart but is then lost.
        self._scheduler.add_job(
            self._run_job,
            trigger=trigger,
            id=job_id,
            kwargs={
                "job_id": job_id,
                "prompt": prompt,
                "deliver_to": deliver_to,
                "chat_id": chat_id,
            },
            replace_existing=True,
        )

        jobs = self._load_dynamic_jobs()
        jobs[job_id] = {
            "cron": cron_expr,
            "prompt": prompt,
            "deliver_to": deliver_to,
            "chat_id": chat_id,
        }
        self._save_dynamic_jobs(jobs)
        logger.info("[CronScheduler] Added dynamic job '%s' (cron='%s')", job_id, cron_expr)
        return f"Job '{job_id}' scheduled: runs '{cron_expr}'. Session: cron:{job_id}."

    def remove_dynamic_job(self, job_id: str) -> str:
        """Remove a dynamic job (called from the Agent cron_remove tool)."""
        jobs = self._load_dynamic_jobs()
        if job_id not in jobs and not self._scheduler.get_job(job_id):
            return f"Job '{job_id}' not found."
        try:
            self._scheduler.remove_job(job_id)
        except Exception:
            # Job may exist only in the persisted store (e.g. scheduler not
            # started yet) — removal from the store below still applies.
            pass
        jobs.pop(job_id, None)
        self._save_dynamic_jobs(jobs)
        logger.info("[CronScheduler] Removed dynamic job '%s'", job_id)
        return f"Job '{job_id}' removed."

    def list_jobs(self) -> str:
        """Return a human-readable list of all active jobs (called from cron_list tool)."""
        scheduler_jobs = self._scheduler.get_jobs()
        dynamic = self._load_dynamic_jobs()
        if not scheduler_jobs:
            return "No scheduled jobs."
        lines = []
        for job in scheduler_jobs:
            tag = "[dynamic]" if job.id in dynamic else "[static]"
            # next_run_time is None while the scheduler is paused/not started.
            next_run = job.next_run_time.strftime("%Y-%m-%d %H:%M %Z") if job.next_run_time else "paused"
            lines.append(f" {tag} {job.id} | next: {next_run}")
        return "Active cron jobs:\n" + "\n".join(lines)
|
|
294
|
+
|
|
295
|
+
|
|
296
|
+
# ── Helpers ──────────────────────────────────────────────────────────────────
|
|
297
|
+
|
|
298
|
+
def _parse_cron(expr: str) -> CronTrigger:
    """Build an APScheduler CronTrigger from a standard 5-field cron string.

    Fields are: minute hour day month day_of_week.  Raises ValueError
    when *expr* does not contain exactly five whitespace-separated fields.
    """
    fields = expr.strip().split()
    if len(fields) != 5:
        raise ValueError(f"Invalid cron expression (expected 5 fields): '{expr}'")
    names = ("minute", "hour", "day", "month", "day_of_week")
    return CronTrigger(**dict(zip(names, fields)))
|