kyber-chat 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kyber/__init__.py +6 -0
- kyber/__main__.py +8 -0
- kyber/agent/__init__.py +8 -0
- kyber/agent/context.py +224 -0
- kyber/agent/loop.py +687 -0
- kyber/agent/memory.py +109 -0
- kyber/agent/skills.py +244 -0
- kyber/agent/subagent.py +379 -0
- kyber/agent/tools/__init__.py +6 -0
- kyber/agent/tools/base.py +102 -0
- kyber/agent/tools/filesystem.py +191 -0
- kyber/agent/tools/message.py +86 -0
- kyber/agent/tools/registry.py +73 -0
- kyber/agent/tools/shell.py +141 -0
- kyber/agent/tools/spawn.py +65 -0
- kyber/agent/tools/task_status.py +53 -0
- kyber/agent/tools/web.py +163 -0
- kyber/bridge/package.json +26 -0
- kyber/bridge/src/index.ts +50 -0
- kyber/bridge/src/server.ts +104 -0
- kyber/bridge/src/types.d.ts +3 -0
- kyber/bridge/src/whatsapp.ts +185 -0
- kyber/bridge/tsconfig.json +16 -0
- kyber/bus/__init__.py +6 -0
- kyber/bus/events.py +37 -0
- kyber/bus/queue.py +81 -0
- kyber/channels/__init__.py +6 -0
- kyber/channels/base.py +121 -0
- kyber/channels/discord.py +304 -0
- kyber/channels/feishu.py +263 -0
- kyber/channels/manager.py +161 -0
- kyber/channels/telegram.py +302 -0
- kyber/channels/whatsapp.py +141 -0
- kyber/cli/__init__.py +1 -0
- kyber/cli/commands.py +736 -0
- kyber/config/__init__.py +6 -0
- kyber/config/loader.py +95 -0
- kyber/config/schema.py +205 -0
- kyber/cron/__init__.py +6 -0
- kyber/cron/service.py +346 -0
- kyber/cron/types.py +59 -0
- kyber/dashboard/__init__.py +5 -0
- kyber/dashboard/server.py +122 -0
- kyber/dashboard/static/app.js +458 -0
- kyber/dashboard/static/favicon.png +0 -0
- kyber/dashboard/static/index.html +107 -0
- kyber/dashboard/static/kyber_logo.png +0 -0
- kyber/dashboard/static/styles.css +608 -0
- kyber/heartbeat/__init__.py +5 -0
- kyber/heartbeat/service.py +130 -0
- kyber/providers/__init__.py +6 -0
- kyber/providers/base.py +69 -0
- kyber/providers/litellm_provider.py +227 -0
- kyber/providers/transcription.py +65 -0
- kyber/session/__init__.py +5 -0
- kyber/session/manager.py +202 -0
- kyber/skills/README.md +47 -0
- kyber/skills/github/SKILL.md +48 -0
- kyber/skills/skill-creator/SKILL.md +371 -0
- kyber/skills/summarize/SKILL.md +67 -0
- kyber/skills/tmux/SKILL.md +121 -0
- kyber/skills/tmux/scripts/find-sessions.sh +112 -0
- kyber/skills/tmux/scripts/wait-for-text.sh +83 -0
- kyber/skills/weather/SKILL.md +49 -0
- kyber/utils/__init__.py +5 -0
- kyber/utils/helpers.py +91 -0
- kyber_chat-1.0.0.dist-info/METADATA +35 -0
- kyber_chat-1.0.0.dist-info/RECORD +71 -0
- kyber_chat-1.0.0.dist-info/WHEEL +4 -0
- kyber_chat-1.0.0.dist-info/entry_points.txt +2 -0
- kyber_chat-1.0.0.dist-info/licenses/LICENSE +21 -0
kyber/config/__init__.py
ADDED
kyber/config/loader.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
"""Configuration loading utilities."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from kyber.config.schema import Config
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def get_config_path() -> Path:
    """Return the default location of the kyber config file (~/.kyber/config.json)."""
    return Path.home().joinpath(".kyber", "config.json")
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def get_data_dir() -> Path:
    """Return the kyber data directory.

    Delegates to :func:`kyber.utils.helpers.get_data_path`; the import is
    done lazily to avoid a circular import at module load time.
    """
    from kyber.utils.helpers import get_data_path

    data_dir = get_data_path()
    return data_dir
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def load_config(config_path: Path | None = None) -> Config:
    """
    Load configuration from file or create default.

    Args:
        config_path: Optional path to config file. Uses default if not provided.

    Returns:
        Loaded configuration object, or a default ``Config`` when the file is
        missing, unreadable, or invalid.
    """
    path = config_path or get_config_path()

    if path.exists():
        try:
            with open(path) as f:
                data = json.load(f)
            # On-disk keys are camelCase; the Pydantic models use snake_case.
            return Config.model_validate(convert_keys(data))
        except (OSError, json.JSONDecodeError, ValueError) as e:
            # OSError: file exists but cannot be read (permissions, I/O);
            # JSONDecodeError: malformed JSON; ValueError: Pydantic
            # validation failure (ValidationError subclasses ValueError).
            print(f"Warning: Failed to load config from {path}: {e}")
            print("Using default configuration.")

    return Config()
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def save_config(config: Config, config_path: Path | None = None) -> None:
    """
    Persist *config* as JSON.

    Args:
        config: Configuration to save.
        config_path: Optional destination path. Uses the default if omitted.
    """
    target = config_path or get_config_path()
    target.parent.mkdir(parents=True, exist_ok=True)

    # The on-disk format uses camelCase keys.
    payload = convert_to_camel(config.model_dump())

    with open(target, "w") as fh:
        json.dump(payload, fh, indent=2)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def convert_keys(data: Any) -> Any:
    """Recursively rewrite camelCase dict keys to snake_case for Pydantic."""
    if isinstance(data, list):
        return [convert_keys(element) for element in data]
    if isinstance(data, dict):
        return {camel_to_snake(key): convert_keys(value) for key, value in data.items()}
    # Scalars (and any other type) pass through untouched.
    return data
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def convert_to_camel(data: Any) -> Any:
    """Recursively rewrite snake_case dict keys to camelCase for storage."""
    if isinstance(data, list):
        return [convert_to_camel(element) for element in data]
    if isinstance(data, dict):
        return {snake_to_camel(key): convert_to_camel(value) for key, value in data.items()}
    # Scalars (and any other type) pass through untouched.
    return data
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def camel_to_snake(name: str) -> str:
|
|
83
|
+
"""Convert camelCase to snake_case."""
|
|
84
|
+
result = []
|
|
85
|
+
for i, char in enumerate(name):
|
|
86
|
+
if char.isupper() and i > 0:
|
|
87
|
+
result.append("_")
|
|
88
|
+
result.append(char.lower())
|
|
89
|
+
return "".join(result)
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
def snake_to_camel(name: str) -> str:
    """Convert snake_case to camelCase.

    Uses ``str.capitalize`` rather than ``str.title`` so that a letter
    following a digit inside a segment is not uppercased
    (``"use_2fa"`` -> ``"use2fa"``, not ``"use2Fa"``), keeping round-trips
    with ``camel_to_snake`` stable for such keys.
    """
    first, *rest = name.split("_")
    return first + "".join(part.capitalize() for part in rest)
|
kyber/config/schema.py
ADDED
|
@@ -0,0 +1,205 @@
|
|
|
1
|
+
"""Configuration schema using Pydantic."""
|
|
2
|
+
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from pydantic import BaseModel, Field
|
|
5
|
+
from pydantic_settings import BaseSettings
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class WhatsAppConfig(BaseModel):
    """WhatsApp channel configuration."""
    enabled: bool = False  # Whether the WhatsApp channel is active
    bridge_url: str = "ws://localhost:3001"  # WebSocket URL of the Node bridge process
    allow_from: list[str] = Field(default_factory=list)  # Allowed phone numbers
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class TelegramConfig(BaseModel):
    """Telegram channel configuration."""
    enabled: bool = False  # Whether the Telegram channel is active
    token: str = ""  # Bot token from @BotFather
    allow_from: list[str] = Field(default_factory=list)  # Allowed user IDs or usernames
    proxy: str | None = None  # HTTP/SOCKS5 proxy URL, e.g. "http://127.0.0.1:7890" or "socks5://127.0.0.1:1080"
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class FeishuConfig(BaseModel):
    """Feishu/Lark channel configuration using WebSocket long connection."""
    enabled: bool = False  # Whether the Feishu channel is active
    app_id: str = ""  # App ID from Feishu Open Platform
    app_secret: str = ""  # App Secret from Feishu Open Platform
    encrypt_key: str = ""  # Encrypt Key for event subscription (optional)
    verification_token: str = ""  # Verification Token for event subscription (optional)
    allow_from: list[str] = Field(default_factory=list)  # Allowed user open_ids
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class DiscordConfig(BaseModel):
    """Discord channel configuration."""
    enabled: bool = False  # Whether the Discord channel is active
    token: str = ""  # Bot token from Discord Developer Portal
    allow_from: list[str] = Field(default_factory=list)  # Allowed user IDs or usernames
    allow_guilds: list[str] = Field(default_factory=list)  # Allowed guild IDs (servers)
    allow_channels: list[str] = Field(default_factory=list)  # Allowed channel IDs
    require_mention_in_guilds: bool = True  # Only respond in guilds when mentioned/replied
    max_attachment_mb: int = 20  # Max attachment size to download
    typing_indicator: bool = True  # Show "typing" while processing
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class ChannelsConfig(BaseModel):
    """Configuration for chat channels; each sub-config defaults to disabled."""
    whatsapp: WhatsAppConfig = Field(default_factory=WhatsAppConfig)
    telegram: TelegramConfig = Field(default_factory=TelegramConfig)
    feishu: FeishuConfig = Field(default_factory=FeishuConfig)
    discord: DiscordConfig = Field(default_factory=DiscordConfig)
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class AgentDefaults(BaseModel):
    """Default agent configuration."""
    workspace: str = "~/.kyber/workspace"  # Expanded via Config.workspace_path
    model: str = "google/gemini-2.5-flash-preview"  # Default LLM model identifier
    provider: str = "openrouter"  # Default provider; see Config._preferred_provider
    max_tokens: int = 8192  # Completion token cap per LLM call
    temperature: float = 0.7  # Sampling temperature
    max_tool_iterations: int = 20  # Cap on tool-call loop iterations per turn
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
class AgentsConfig(BaseModel):
    """Agent configuration (currently only the shared defaults)."""
    defaults: AgentDefaults = Field(default_factory=AgentDefaults)
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
class ProviderConfig(BaseModel):
    """LLM provider configuration."""
    api_key: str = ""  # Empty string means "not configured"
    api_base: str | None = None  # Optional custom API base URL
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
class ProvidersConfig(BaseModel):
    """Configuration for LLM providers.

    Attribute names double as provider identifiers: Config resolves the
    preferred provider string via getattr on this model.
    """
    anthropic: ProviderConfig = Field(default_factory=ProviderConfig)
    openai: ProviderConfig = Field(default_factory=ProviderConfig)
    openrouter: ProviderConfig = Field(default_factory=ProviderConfig)
    deepseek: ProviderConfig = Field(default_factory=ProviderConfig)
    groq: ProviderConfig = Field(default_factory=ProviderConfig)
    zhipu: ProviderConfig = Field(default_factory=ProviderConfig)
    vllm: ProviderConfig = Field(default_factory=ProviderConfig)
    gemini: ProviderConfig = Field(default_factory=ProviderConfig)
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class GatewayConfig(BaseModel):
    """Gateway/server configuration."""
    host: str = "0.0.0.0"  # Bind address; 0.0.0.0 listens on all interfaces
    port: int = 18790  # TCP port for the gateway server
|
|
90
|
+
|
|
91
|
+
|
|
92
|
+
class WebSearchConfig(BaseModel):
    """Web search tool configuration."""
    api_key: str = ""  # Brave Search API key
    max_results: int = 5  # Maximum results returned per search
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
class WebToolsConfig(BaseModel):
    """Web tools configuration (currently only search)."""
    search: WebSearchConfig = Field(default_factory=WebSearchConfig)
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
class DashboardConfig(BaseModel):
    """Web dashboard configuration."""
    host: str = "127.0.0.1"  # Bind address; loopback-only by default
    port: int = 18890  # TCP port for the dashboard server
    auth_token: str = ""  # Bearer token for dashboard access
    allowed_hosts: list[str] = Field(default_factory=list)  # Extra allowed Host headers
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class ExecToolConfig(BaseModel):
    """Shell exec tool configuration."""
    timeout: int = 60  # Seconds before a shell command is killed
    restrict_to_workspace: bool = False  # If true, block commands accessing paths outside workspace
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
class ToolsConfig(BaseModel):
    """Tools configuration."""
    web: WebToolsConfig = Field(default_factory=WebToolsConfig)
    exec: ExecToolConfig = Field(default_factory=ExecToolConfig)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class Config(BaseSettings):
    """Root configuration for kyber.

    All sections default to their own defaults, so ``Config()`` is always a
    valid, fully populated configuration.  As a ``BaseSettings`` subclass,
    values can also be overridden from the environment (see the inner
    ``Config`` class at the bottom).
    """
    agents: AgentsConfig = Field(default_factory=AgentsConfig)
    channels: ChannelsConfig = Field(default_factory=ChannelsConfig)
    providers: ProvidersConfig = Field(default_factory=ProvidersConfig)
    gateway: GatewayConfig = Field(default_factory=GatewayConfig)
    dashboard: DashboardConfig = Field(default_factory=DashboardConfig)
    tools: ToolsConfig = Field(default_factory=ToolsConfig)

    @property
    def workspace_path(self) -> Path:
        """Get expanded workspace path (``~`` resolved via expanduser)."""
        return Path(self.agents.defaults.workspace).expanduser()

    def _preferred_provider(self) -> str | None:
        """Return the explicitly configured provider, if any.

        Normalizes to lowercase; an empty/whitespace-only setting counts as
        "no explicit preference" and returns None.
        """
        value = (self.agents.defaults.provider or "").strip().lower()
        return value or None

    def get_api_key(self) -> str | None:
        """Get API key in priority order, unless a provider is explicitly set.

        When a provider is explicitly configured, returns that provider's
        key (which may be the empty string if unset — falsy but not None),
        or None if the name does not match any ``providers`` attribute.
        Otherwise falls back to the first non-empty key in the fixed
        priority order below.
        """
        preferred = self._preferred_provider()
        if preferred:
            provider = getattr(self.providers, preferred, None)
            # NOTE(review): an unknown name matching a non-ProviderConfig
            # attribute on the model would raise here — confirm callers only
            # pass known provider names.
            return provider.api_key if provider else None
        # Fallback priority: first provider with a truthy (non-empty) key.
        return (
            self.providers.openrouter.api_key or
            self.providers.deepseek.api_key or
            self.providers.anthropic.api_key or
            self.providers.openai.api_key or
            self.providers.gemini.api_key or
            self.providers.zhipu.api_key or
            self.providers.groq.api_key or
            self.providers.vllm.api_key or
            None
        )

    def get_api_base(self) -> str | None:
        """Get API base URL if using OpenRouter, Zhipu or vLLM.

        Only these three providers carry a meaningful base URL here; any
        other explicitly preferred provider yields None (its client uses
        its own default endpoint).
        """
        preferred = self._preferred_provider()
        if preferred == "openrouter":
            return self.providers.openrouter.api_base or "https://openrouter.ai/api/v1"
        if preferred == "zhipu":
            return self.providers.zhipu.api_base
        if preferred == "vllm":
            return self.providers.vllm.api_base
        if preferred:
            # Explicit preference for some other provider: no custom base.
            return None
        # No explicit preference: mirror the key-detection order for the
        # three base-URL providers.
        if self.providers.openrouter.api_key:
            return self.providers.openrouter.api_base or "https://openrouter.ai/api/v1"
        if self.providers.zhipu.api_key:
            return self.providers.zhipu.api_base
        if self.providers.vllm.api_base:
            return self.providers.vllm.api_base
        return None

    def get_provider_name(self) -> str | None:
        """Return the selected provider name based on configured keys.

        An explicit preference wins if it names a real ``providers``
        attribute; otherwise the first provider with a configured key is
        chosen, in the same priority order as :meth:`get_api_key`.  vLLM is
        special-cased last: a base URL alone selects it (local servers may
        not need a key).
        """
        preferred = self._preferred_provider()
        if preferred:
            return preferred if hasattr(self.providers, preferred) else None
        if self.providers.openrouter.api_key:
            return "openrouter"
        if self.providers.deepseek.api_key:
            return "deepseek"
        if self.providers.anthropic.api_key:
            return "anthropic"
        if self.providers.openai.api_key:
            return "openai"
        if self.providers.gemini.api_key:
            return "gemini"
        if self.providers.zhipu.api_key:
            return "zhipu"
        if self.providers.groq.api_key:
            return "groq"
        if self.providers.vllm.api_base or self.providers.vllm.api_key:
            return "vllm"
        return None

    class Config:
        # pydantic-settings: environment variables override file values,
        # e.g. KYBER_AGENTS__DEFAULTS__MODEL maps to agents.defaults.model.
        env_prefix = "KYBER_"
        env_nested_delimiter = "__"
|
kyber/cron/__init__.py
ADDED
kyber/cron/service.py
ADDED
|
@@ -0,0 +1,346 @@
|
|
|
1
|
+
"""Cron service for scheduling agent tasks."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import time
|
|
6
|
+
import uuid
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any, Callable, Coroutine
|
|
9
|
+
|
|
10
|
+
from loguru import logger
|
|
11
|
+
|
|
12
|
+
from kyber.cron.types import CronJob, CronJobState, CronPayload, CronSchedule, CronStore
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _now_ms() -> int:
|
|
16
|
+
return int(time.time() * 1000)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _compute_next_run(schedule: CronSchedule, now_ms: int) -> int | None:
    """Compute the next run time in ms, or None if the schedule never fires again."""
    kind = schedule.kind

    if kind == "at":
        # One-shot: only meaningful while the timestamp is still in the future.
        at = schedule.at_ms
        return at if at and at > now_ms else None

    if kind == "every":
        interval = schedule.every_ms
        if interval and interval > 0:
            # Anchor the next interval to "now" rather than to the last run.
            return now_ms + interval
        return None

    if kind == "cron" and schedule.expr:
        try:
            from croniter import croniter

            upcoming = croniter(schedule.expr, time.time()).get_next()
            return int(upcoming * 1000)
        except Exception:
            # Invalid expression (or croniter unavailable): treat as "never".
            return None

    return None
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class CronService:
    """Service for managing and executing scheduled jobs.

    Jobs are persisted as a camelCase JSON document at ``store_path`` and
    executed through the ``on_job`` coroutine callback.  Scheduling uses a
    single asyncio sleep task that is re-armed after every mutation of the
    job list.
    """

    def __init__(
        self,
        store_path: Path,
        on_job: Callable[[CronJob], Coroutine[Any, Any, str | None]] | None = None
    ):
        # Location of the JSON job store on disk.
        self.store_path = store_path
        self.on_job = on_job  # Callback to execute job, returns response text
        self._store: CronStore | None = None  # In-memory cache, loaded lazily
        self._timer_task: asyncio.Task | None = None  # Pending wake-up sleep task
        self._running = False  # Toggled by start()/stop()

    def _load_store(self) -> CronStore:
        """Load jobs from disk.

        Returns the cached store when already loaded.  Any parse/validation
        failure is logged and replaced with an empty store (best-effort).
        JSON keys are camelCase and mapped to snake_case model fields here.
        """
        if self._store:
            return self._store

        if self.store_path.exists():
            try:
                data = json.loads(self.store_path.read_text())
                jobs = []
                for j in data.get("jobs", []):
                    jobs.append(CronJob(
                        id=j["id"],
                        name=j["name"],
                        enabled=j.get("enabled", True),
                        schedule=CronSchedule(
                            kind=j["schedule"]["kind"],
                            at_ms=j["schedule"].get("atMs"),
                            every_ms=j["schedule"].get("everyMs"),
                            expr=j["schedule"].get("expr"),
                            tz=j["schedule"].get("tz"),
                        ),
                        payload=CronPayload(
                            kind=j["payload"].get("kind", "agent_turn"),
                            message=j["payload"].get("message", ""),
                            deliver=j["payload"].get("deliver", False),
                            channel=j["payload"].get("channel"),
                            to=j["payload"].get("to"),
                        ),
                        state=CronJobState(
                            next_run_at_ms=j.get("state", {}).get("nextRunAtMs"),
                            last_run_at_ms=j.get("state", {}).get("lastRunAtMs"),
                            last_status=j.get("state", {}).get("lastStatus"),
                            last_error=j.get("state", {}).get("lastError"),
                        ),
                        created_at_ms=j.get("createdAtMs", 0),
                        updated_at_ms=j.get("updatedAtMs", 0),
                        delete_after_run=j.get("deleteAfterRun", False),
                    ))
                self._store = CronStore(jobs=jobs)
            except Exception as e:
                logger.warning(f"Failed to load cron store: {e}")
                self._store = CronStore()
        else:
            self._store = CronStore()

        return self._store

    def _save_store(self) -> None:
        """Save jobs to disk as camelCase JSON; no-op if nothing is loaded."""
        if not self._store:
            return

        self.store_path.parent.mkdir(parents=True, exist_ok=True)

        data = {
            "version": self._store.version,
            "jobs": [
                {
                    "id": j.id,
                    "name": j.name,
                    "enabled": j.enabled,
                    "schedule": {
                        "kind": j.schedule.kind,
                        "atMs": j.schedule.at_ms,
                        "everyMs": j.schedule.every_ms,
                        "expr": j.schedule.expr,
                        "tz": j.schedule.tz,
                    },
                    "payload": {
                        "kind": j.payload.kind,
                        "message": j.payload.message,
                        "deliver": j.payload.deliver,
                        "channel": j.payload.channel,
                        "to": j.payload.to,
                    },
                    "state": {
                        "nextRunAtMs": j.state.next_run_at_ms,
                        "lastRunAtMs": j.state.last_run_at_ms,
                        "lastStatus": j.state.last_status,
                        "lastError": j.state.last_error,
                    },
                    "createdAtMs": j.created_at_ms,
                    "updatedAtMs": j.updated_at_ms,
                    "deleteAfterRun": j.delete_after_run,
                }
                for j in self._store.jobs
            ]
        }

        self.store_path.write_text(json.dumps(data, indent=2))

    async def start(self) -> None:
        """Start the cron service: load, reschedule, persist, and arm the timer."""
        self._running = True
        self._load_store()
        # Next-run times may be stale after a restart; recompute from "now".
        self._recompute_next_runs()
        self._save_store()
        self._arm_timer()
        logger.info(f"Cron service started with {len(self._store.jobs if self._store else [])} jobs")

    def stop(self) -> None:
        """Stop the cron service and cancel any pending timer task."""
        self._running = False
        if self._timer_task:
            self._timer_task.cancel()
            self._timer_task = None

    def _recompute_next_runs(self) -> None:
        """Recompute next run times for all enabled jobs (disabled jobs untouched)."""
        if not self._store:
            return
        now = _now_ms()
        for job in self._store.jobs:
            if job.enabled:
                job.state.next_run_at_ms = _compute_next_run(job.schedule, now)

    def _get_next_wake_ms(self) -> int | None:
        """Get the earliest next run time across all enabled jobs, or None."""
        if not self._store:
            return None
        times = [j.state.next_run_at_ms for j in self._store.jobs
                 if j.enabled and j.state.next_run_at_ms]
        return min(times) if times else None

    def _arm_timer(self) -> None:
        """Schedule the next timer tick.

        Cancels any pending tick, then sleeps until the earliest due job.
        NOTE(review): asyncio.create_task requires a running event loop, so
        calling mutators (add_job/remove_job/...) from a purely synchronous
        context would raise — confirm all callers run inside the loop.
        """
        if self._timer_task:
            self._timer_task.cancel()

        next_wake = self._get_next_wake_ms()
        if not next_wake or not self._running:
            # Nothing scheduled (or service stopped): leave the timer unarmed.
            return

        delay_ms = max(0, next_wake - _now_ms())
        delay_s = delay_ms / 1000

        async def tick():
            await asyncio.sleep(delay_s)
            if self._running:
                await self._on_timer()

        self._timer_task = asyncio.create_task(tick())

    async def _on_timer(self) -> None:
        """Handle timer tick - run due jobs, persist state, and re-arm."""
        if not self._store:
            return

        now = _now_ms()
        due_jobs = [
            j for j in self._store.jobs
            if j.enabled and j.state.next_run_at_ms and now >= j.state.next_run_at_ms
        ]

        # Jobs run sequentially, in stored order.
        for job in due_jobs:
            await self._execute_job(job)

        self._save_store()
        self._arm_timer()

    async def _execute_job(self, job: CronJob) -> None:
        """Execute a single job and update its state/bookkeeping.

        Does not persist; callers are responsible for _save_store().
        """
        start_ms = _now_ms()
        logger.info(f"Cron: executing job '{job.name}' ({job.id})")

        try:
            # NOTE(review): the callback's return value is captured but not
            # used here — presumably delivery is handled inside on_job.
            response = None
            if self.on_job:
                response = await self.on_job(job)

            job.state.last_status = "ok"
            job.state.last_error = None
            logger.info(f"Cron: job '{job.name}' completed")

        except Exception as e:
            # Record the failure but keep the service alive; the error is
            # surfaced via job state and logs.
            job.state.last_status = "error"
            job.state.last_error = str(e)
            logger.error(f"Cron: job '{job.name}' failed: {e}")

        job.state.last_run_at_ms = start_ms
        job.updated_at_ms = _now_ms()

        # Handle one-shot jobs
        if job.schedule.kind == "at":
            if job.delete_after_run:
                self._store.jobs = [j for j in self._store.jobs if j.id != job.id]
            else:
                # Keep the job around but never fire it again.
                job.enabled = False
                job.state.next_run_at_ms = None
        else:
            # Compute next run
            job.state.next_run_at_ms = _compute_next_run(job.schedule, _now_ms())

    # ========== Public API ==========

    def list_jobs(self, include_disabled: bool = False) -> list[CronJob]:
        """List jobs, soonest-first; jobs without a next run sort last."""
        store = self._load_store()
        jobs = store.jobs if include_disabled else [j for j in store.jobs if j.enabled]
        return sorted(jobs, key=lambda j: j.state.next_run_at_ms or float('inf'))

    def add_job(
        self,
        name: str,
        schedule: CronSchedule,
        message: str,
        deliver: bool = False,
        channel: str | None = None,
        to: str | None = None,
        delete_after_run: bool = False,
    ) -> CronJob:
        """Add a new job, persist it, and re-arm the timer.

        Args:
            name: Human-readable job name.
            schedule: When the job should fire.
            message: Agent prompt stored in the payload.
            deliver: Whether the result should be delivered to a channel.
            channel: Target channel name for delivery.
            to: Target recipient identifier for delivery.
            delete_after_run: Remove one-shot jobs after they fire.

        Returns:
            The newly created job (with a short random 8-char id).
        """
        store = self._load_store()
        now = _now_ms()

        job = CronJob(
            id=str(uuid.uuid4())[:8],
            name=name,
            enabled=True,
            schedule=schedule,
            payload=CronPayload(
                kind="agent_turn",
                message=message,
                deliver=deliver,
                channel=channel,
                to=to,
            ),
            state=CronJobState(next_run_at_ms=_compute_next_run(schedule, now)),
            created_at_ms=now,
            updated_at_ms=now,
            delete_after_run=delete_after_run,
        )

        store.jobs.append(job)
        self._save_store()
        self._arm_timer()

        logger.info(f"Cron: added job '{name}' ({job.id})")
        return job

    def remove_job(self, job_id: str) -> bool:
        """Remove a job by ID; returns True if something was removed."""
        store = self._load_store()
        before = len(store.jobs)
        store.jobs = [j for j in store.jobs if j.id != job_id]
        removed = len(store.jobs) < before

        if removed:
            self._save_store()
            self._arm_timer()
            logger.info(f"Cron: removed job {job_id}")

        return removed

    def enable_job(self, job_id: str, enabled: bool = True) -> CronJob | None:
        """Enable or disable a job; returns the job, or None if not found."""
        store = self._load_store()
        for job in store.jobs:
            if job.id == job_id:
                job.enabled = enabled
                job.updated_at_ms = _now_ms()
                if enabled:
                    job.state.next_run_at_ms = _compute_next_run(job.schedule, _now_ms())
                else:
                    job.state.next_run_at_ms = None
                self._save_store()
                self._arm_timer()
                return job
        return None

    async def run_job(self, job_id: str, force: bool = False) -> bool:
        """Manually run a job.

        Disabled jobs run only when *force* is True.  Returns True when the
        job was found and executed.
        """
        store = self._load_store()
        for job in store.jobs:
            if job.id == job_id:
                if not force and not job.enabled:
                    return False
                await self._execute_job(job)
                self._save_store()
                self._arm_timer()
                return True
        return False

    def status(self) -> dict:
        """Get service status: running flag, total job count, next wake time."""
        store = self._load_store()
        return {
            "enabled": self._running,
            "jobs": len(store.jobs),
            "next_wake_at_ms": self._get_next_wake_ms(),
        }
|